#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif
// Define this macro to declare maximum supported Vulkan version in format AAABBBCCC,
// e.g. 1001000 = Vulkan 1.1. By default it is detected from the included Vulkan headers.
#if !defined(VMA_VULKAN_VERSION)
    #if defined(VK_VERSION_1_1)
        #define VMA_VULKAN_VERSION 1001000
    #else
        #define VMA_VULKAN_VERSION 1000000
    #endif
#endif

#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif
// Define these macros to decorate all public functions with additional code,
// e.g. for DLL export/import.
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif
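// A minimal configuration sketch, not part of the library: how a client translation
// unit might override the macros above before including this header. The dllexport
// decoration and the file name "vk_mem_alloc.h" are assumptions for illustration.
#if 0
#define VMA_VULKAN_VERSION 1000000              // restrict VMA to core Vulkan 1.0
#define VMA_CALL_PRE  __declspec(dllexport)     // decorate public functions (MSVC example)
#define VMA_CALL_POST __cdecl
#include "vk_mem_alloc.h"
#endif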
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
    // Members of struct VmaVulkanFunctions: pointers to extension entry points,
    // required only when the corresponding feature is enabled.
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
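// A usage sketch, assuming a Vulkan 1.1 device where these entry points exist in
// core: filling only the VmaVulkanFunctions members shown above via
// vkGetDeviceProcAddr. Function and parameter names here are illustrative.
#if 0
static void FillExtensionFunctions(VkDevice device, VmaVulkanFunctions& funcs)
{
    funcs.vkGetBufferMemoryRequirements2KHR =
        (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(device, "vkGetBufferMemoryRequirements2");
    funcs.vkGetImageMemoryRequirements2KHR =
        (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(device, "vkGetImageMemoryRequirements2");
    funcs.vkBindBufferMemory2KHR =
        (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(device, "vkBindBufferMemory2");
    funcs.vkBindImageMemory2KHR =
        (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(device, "vkBindImageMemory2");
}
#endif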
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

/// Given memory type index, returns its VkMemoryPropertyFlags.
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

/// Sets index of the current frame.
VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);
#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as a null-terminated string in JSON format.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
/// Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

/// Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

/// Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);
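// A usage sketch, assuming an initialized VmaAllocator `allocator`: picking a
// memory type for a uniform buffer that the CPU writes every frame. The function
// name is illustrative only.
#if 0
static VkResult PickUniformBufferMemoryType(VmaAllocator allocator, uint32_t* pMemTypeIndex)
{
    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = 65536;
    bufInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    return vmaFindMemoryTypeIndexForBufferInfo(allocator, &bufInfo, &allocInfo, pMemTypeIndex);
}
#endif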
/** Marks all allocations in given pool as lost if they are not used in current
frame or VmaPoolCreateInfo::frameInUseCount frames back from now. */
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

/// Retrieves name of a custom pool.
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName);
/// General purpose memory allocation.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// General purpose memory allocation for multiple allocation objects at once.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo);

/// Frees memory and destroys multiple allocations.
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations);

/// Deprecated. Tries to resize an allocation in place, if there is enough free memory after it.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize);
/// Deprecated. Compacts memory by moving allocations.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

/// Binds buffer to allocation with additional parameters.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext);

/// Binds image to allocation with additional parameters.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
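// A usage sketch, assuming an initialized VmaAllocator `allocator`: creating a
// GPU-only vertex buffer and releasing it with the matching destroy call.
// The function name is illustrative only.
#if 0
static VkResult CreateVertexBuffer(VmaAllocator allocator, VkDeviceSize size,
    VkBuffer* pBuf, VmaAllocation* pAlloc)
{
    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = size;
    bufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    return vmaCreateBuffer(allocator, &bufInfo, &allocInfo, pBuf, pAlloc, nullptr);
    // When done with the buffer: vmaDestroyBuffer(allocator, *pBuf, *pAlloc);
}
#endif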
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdlib>
#include <cstring>

// Define this macro to 1 to make the library fetch pointers to Vulkan functions
// internally, like: vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif
// Define to 1 to make the library use STL containers instead of its own implementation.
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio 2015 Update 2 and later still reports 199711L in __cplusplus
    // but supports std::shared_mutex; detect it via _MSVC_LANG.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif

#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <cassert>
#include <algorithm>
#include <mutex>
#ifndef VMA_NULL
    // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    #define VMA_NULL nullptr
#endif

#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib> // for memalign()
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }
    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void* pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
    {
        return pointer;
    }
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef NDEBUG
        #define VMA_ASSERT(expr)
    #else
        #define VMA_ASSERT(expr) assert(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef NDEBUG
        #define VMA_HEAVY_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #endif
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif
// Number-to-string conversion helpers, used only by the JSON statistics writer.
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
        bool TryLock() { return m_Mutex.try_lock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif
// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            bool TryLockRead() { return m_Mutex.try_lock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
            bool TryLockWrite() { return m_Mutex.try_lock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        // Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
            bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: use a plain mutex for both reading and writing.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            bool TryLockRead() { return m_Mutex.TryLock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
            bool TryLockWrite() { return m_Mutex.TryLock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation will have its own memory block. Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes.
    // Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes.
    // Set nonzero for debugging purposes only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define this macro to 1 to automatically fill new allocations and destroyed
    // allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define this macro to 1 together with non-zero VMA_DEBUG_MARGIN to enable
    // writing magic values to the margins and validating them, so that memory
    // corruptions (out-of-bounds writes) are detected.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Set this to 1 for debugging purposes only, to enable a single mutex protecting
    // all entry calls to the library. Can be useful for debugging multithreading issues.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
    // Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif
#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN, little-endian binary 66 E6 84 7F.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

// Copy of some Vulkan definitions so we don't need to check their existence just to handle few constants.
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v), using parallel bit counting (popcount).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
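// A worked example of the popcount above: for v = 0xF0 (binary 1111'0000), the
// successive steps sum adjacent 1-bit, 2-bit, 4-bit... groups, so
// VmaCountBitsSet(0xF0) == 4. An illustrative self-check (compiled out):
#if 0
static void CheckCountBitsSet()
{
    VMA_ASSERT(VmaCountBitsSet(0x00000000u) == 0);
    VMA_ASSERT(VmaCountBitsSet(0x000000F0u) == 4);
    VMA_ASSERT(VmaCountBitsSet(0xFFFFFFFFu) == 32);
}
#endif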
// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}

// Returns true if given number is a power of two.
// T must be unsigned integer number or signed integer but always nonnegative.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
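// Worked examples for the helpers above (illustrative only):
//   VmaAlignUp<uint32_t>(11, 8)   == 16   // next multiple of 8
//   VmaAlignDown<uint32_t>(11, 8) == 8    // previous multiple of 8
//   VmaRoundDiv<uint32_t>(7, 2)   == 4    // 3.5 rounded to nearest
//   VmaIsPow2(64u) == true, VmaIsPow2(48u) == false,
//   VmaIsPow2(0u)  == true (an edge case of the (x & (x-1)) formula)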
// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}
#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
/*
Returns true if two memory blocks occupy very close parts of the same
VkDeviceMemory "page" of size (pageSize), so they must respect
bufferImageGranularity. pageSize must be a power of two.
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
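// A worked example with pageSize = 4096:
//   resource A at offset 0, size 4000 -> last byte 3999, which lies on page 0.
//   resource B at offset 4000 -> page 0  => same page      => returns true
//   resource B at offset 4096 -> page 1  => different page => returns false
// So VmaBlocksOnSamePage(0, 4000, 4000, 4096) == true and
//    VmaBlocksOnSamePage(0, 4000, 4096, 4096) == false.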
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
or linear image and another one is optimal image. If type is unknown, behave
conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
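// Examples of the conservative behavior above: a buffer next to an
// optimal-tiling image conflicts, while two buffers never do:
//   VmaIsBufferImageGranularityConflict(VMA_SUBALLOCATION_TYPE_BUFFER,
//       VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL) == true
//   VmaIsBufferImageGranularityConflict(VMA_SUBALLOCATION_TYPE_BUFFER,
//       VMA_SUBALLOCATION_TYPE_BUFFER) == false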
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#endif
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
// Fills structure with parameters of an example buffer to be used for transfers
// during GPU memory defragmentation.
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
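// A usage sketch of the RAII guards above; the function and comments are
// illustrative, not taken from elsewhere in this file.
#if 0
static void ExampleGuardedAccess(VMA_RW_MUTEX& mutex, bool useMutex)
{
    {
        VmaMutexLockRead readLock(mutex, useMutex);   // shared lock for reading
        // ... read shared state ...
    }                                                 // unlocked at end of scope
    {
        VmaMutexLockWrite writeLock(mutex, useMutex); // exclusive lock for writing
        // ... modify shared state ...
    }
}
#endif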
#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection, or place
where a new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}

template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
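// A usage sketch for the binary search above, assuming a sorted array of
// VkDeviceSize and a plain less-than comparator; names are illustrative.
#if 0
struct VmaDeviceSizeLess
{
    bool operator()(VkDeviceSize lhs, VkDeviceSize rhs) const { return lhs < rhs; }
};

static const VkDeviceSize* FindFirstNotLess(
    const VkDeviceSize* beg, const VkDeviceSize* end, VkDeviceSize key)
{
    // Returns pointer to the first element >= key, or end if all are smaller.
    return VmaBinaryFindFirstNotLess(beg, end, key, VmaDeviceSizeLess());
}
#endif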
// Returns true if all pointers in the array are not-null and unique.
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)
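// A usage sketch for the placement-new helpers above: construct and destroy an
// object through VkAllocationCallbacks (allocs may be null to fall back to the
// system heap). The payload type and function names are illustrative.
#if 0
struct ExamplePayload
{
    uint32_t a, b;
    ExamplePayload(uint32_t x, uint32_t y) : a(x), b(y) { }
};

static void ExampleVmaNewDelete(const VkAllocationCallbacks* allocs)
{
    // Allocates raw memory via VmaMalloc, then runs the constructor in place.
    ExamplePayload* p = vma_new(allocs, ExamplePayload)(1u, 2u);
    // ... use p ...
    vma_delete(allocs, p); // runs ~ExamplePayload, then VmaFree
}
#endif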
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}

static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator that routes all storage through VkAllocationCallbacks.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
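// A usage sketch: VmaStlAllocator lets standard containers obtain their storage
// through VkAllocationCallbacks. Illustrative only; the library itself uses it
// with its own containers and with VMA_MAP_TYPE below.
#if 0
static void ExampleStlAllocator(const VkAllocationCallbacks* allocs)
{
    std::vector< uint32_t, VmaStlAllocator<uint32_t> > v(VmaStlAllocator<uint32_t>(allocs));
    v.push_back(42u); // storage obtained via VmaAllocateArray / released via VmaFree
}
#endif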
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR
/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
    // value is unused.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
////////////////////////////////////////////////////////////////////////////////
// class VmaPoolAllocator

/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    template<typename... Types> T* Alloc(Types... args);
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
            return result;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
    return result;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            ptr->~T(); // Explicit destructor call.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
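// A usage sketch of the pool allocator above (illustrative): items are created
// with placement new inside growing blocks and recycled through a free list.
#if 0
static void ExamplePoolAllocator(const VkAllocationCallbacks* allocs)
{
    VmaPoolAllocator<uint64_t> pool(allocs, 32); // first block holds 32 items
    uint64_t* a = pool.Alloc(7ull);              // forwards args to the constructor
    uint64_t* b = pool.Alloc(9ull);
    pool.Free(a);                                // returns the slot to the free list
    pool.Free(b);
}
#endif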
////////////////////////////////////////////////////////////////////////////////
// class VmaRawList, VmaList

#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
////////////////////////////////////////////////////////////////////////////////
// class VmaPair, VmaMap

#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
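// A usage sketch for the sorted-vector VmaMap above (illustrative; assumes the
// non-STL branch where VMA_MAP_TYPE expands to VmaMap):
#if 0
static void ExampleMap(const VkAllocationCallbacks* allocs)
{
    VmaMap<uint32_t, uint64_t> map(VmaStlAllocator< VmaPair<uint32_t, uint64_t> >(allocs));
    map.insert(VmaPair<uint32_t, uint64_t>(1u, 100ull)); // keeps the vector sorted by key
    VmaPair<uint32_t, uint64_t>* it = map.find(1u);      // binary search, O(log n)
    if(it != map.end())
    {
        it->second = 200ull;
    }
}
#endif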
////////////////////////////////////////////////////////////////////////////////

class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };

struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment{1},
        m_Size{0},
        m_pUserData{VMA_NULL},
        m_LastUseFrameIndex{currentFrameIndex},
        m_MemoryTypeIndex{0},
        m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
        m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
        m_MapCount{0},
        m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
    {
#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If LastUseFrameIndex + frameInUseCount < currentFrameIndex, makes the
      allocation lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif
};
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned
as allocated memory block or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparators for VmaSuballocation, ordering by offset.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

/*
Parameters of planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
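// A worked example of CalcCost above: a request that would reuse 2 MiB of live
// suballocations and make 3 allocations lost costs
//   sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST
//   = 2*1048576 + 3*1048576 = 5242880 bytes-equivalent,
// so the allocator prefers requests that disturb fewer existing allocations.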
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    // Shouldn't modify blockCount.
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request must already be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};

#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    ////////////////////////////////////////////////////////////////////////////////
    // For defragmentation

    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    /*
    There are two suballocation vectors, used in ping-pong way.
    The one with index m_1stVectorIndex is called 1st.
    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
    2nd can be non-empty only when 1st is not empty.
    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
    */
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        /*
        Suballocations in 2nd vector are created later than the ones in 1st, but they
        all have smaller offset.
        */
        SECOND_VECTOR_RING_BUFFER,
        /*
        Suballocations in 2nd vector are upper side of double stack.
        They all have offsets higher than those in 1st vector.
        Top of this stack means smaller offsets, but higher indices in this vector.
        */
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
/*
Buddy allocator:
- GetSize() is the original size of allocated memory block.
- m_UsableSize is this size aligned down to a power of two.
  All allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them.
  It is reported as separate, unused range, not available for allocations.

Node at level 0 has size = m_UsableSize.
Each next level contains nodes with size 2 times smaller than current level.
m_LevelCount is the maximum number of levels to use in the current object.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    // Doubly linked list of free nodes on each level.
    struct {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    // This includes space wasted due to internal fragmentation - allocation sizes are up-rounded to node sizes.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // Alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    // node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    // node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
/*
Represents a single block of device memory (VkDeviceMemory) with all the
data about its regions (aka suballocations, VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    VkResult CheckCorruption(VmaAllocator hAllocator);

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it is not used by multiple threads
    simultaneously, e.g. vkMapMemory, vkBindBufferMemory.
    Also protects m_MapCount and m_pMappedData.
    Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VmaDeviceMemoryBlock* pSrcBlock;
    VmaDeviceMemoryBlock* pDstBlock;
};

class VmaDefragmentationAlgorithm;
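/*
Illustration (not part of the library): a VmaDefragmentationMove records a
source block/offset and a destination block/offset. Applying such a move on
the CPU amounts to copying bytes between the two mapped blocks; the real
implementation must also handle overlapping ranges within the same block.
A minimal sketch, assuming both blocks are already mapped and `size` is taken
from the allocation being moved (hypothetical helper, not library API):

    #include <string.h>

    void ApplyMoveSketch(void* pSrcBlockData, void* pDstBlockData,
        const VmaDefragmentationMove& move, size_t size)
    {
        memmove(
            (char*)pDstBlockData + move.dstOffset,
            (char*)pSrcBlockData + move.srcOffset,
            size);
    }
*/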
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)

    VmaBlockVector(
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize alignment,
        VmaSuballocationType suballocType,
        size_t allocationCount,

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    VkResult Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
    uint32_t ProcessDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
    void CommitDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    bool m_HasEmptyBlock;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    void Remove(VmaDeviceMemoryBlock* pBlock);

    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize alignment,
        VmaSuballocationType suballocType,

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize alignment,
        VmaSuballocationType suballocType,

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    void UpdateHasEmptyBlock();
};
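/*
Illustration (not part of the library): IncrementallySortBlocks() declared
above can amortize sorting cost by doing a single bubble-sort pass that stops
at the first swap, keeping m_Blocks ordered by ascending free size across
many calls (see the "Incrementally sorted" comment). A hedged sketch of that
strategy over a plain std::vector of free sizes:

    #include <vector>
    #include <utility>
    #include <stdint.h>

    void IncrementallySortSketch(std::vector<uint64_t>& freeSizes)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < freeSizes.size(); ++i)
        {
            if(freeSizes[i - 1] > freeSizes[i])
            {
                std::swap(freeSizes[i - 1], freeSizes[i]);
                return;
            }
        }
    }
*/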
    VMA_CLASS_NO_COPY(VmaPool_T)

    VmaBlockVector m_BlockVector;

        VkDeviceSize preferredBlockSize);

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        bool freeOldAllocations);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first null or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    // Leave this structure for remaining empty space.
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    // This structure becomes invalid.
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }
                return true;
            }
            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex;
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
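/*
Illustration (not part of the library): the FreeSpaceDatabase above keeps at
most MAX_COUNT candidate free ranges. Register() remembers a range if it is
large enough; Fetch() returns a block index and aligned offset for a new
allocation and shrinks or retires the chosen range. A hedged usage sketch
with made-up numbers:

    FreeSpaceDatabase db;
    db.Register(0, 192, 4096);     // block 0 has 4096 free bytes at offset 192
    size_t blockIndex;
    VkDeviceSize dstOffset;
    if(db.Fetch(128, 1024, blockIndex, dstOffset))
    {
        // dstOffset == 256 (192 aligned up to 128); the remaining
        // 3008 bytes at offset 1280 stay registered for the next Fetch().
    }
*/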
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
    uint32_t defragmentationMovesProcessed;
    uint32_t defragmentationMovesCommitted;
    bool hasDefragmentationPlan;

    VmaBlockVectorDefragmentationContext(
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

private:
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;

    VmaDefragmentationAlgorithm* m_pAlgorithm;

    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        uint32_t currFrameIndex,
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        VkBool32* pAllocationsChanged);

    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkResult DefragmentPassEnd();

private:
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;

    VkDeviceSize m_MaxCpuBytesToMove;
    uint32_t m_MaxCpuAllocationsToMove;
    VkDeviceSize m_MaxGpuBytesToMove;
    uint32_t m_MaxGpuAllocationsToMove;

    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled,
        bool deviceCoherentMemoryExtensionEnabled);

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        uint64_t allocationCount,
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
    void RecordFreeMemory(uint32_t frameIndex,
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
    void RecordSetAllocationUserData(uint32_t frameIndex,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
    void RecordMapMemory(uint32_t frameIndex,
    void RecordUnmapMemory(uint32_t frameIndex,
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
    void RecordDestroyBuffer(uint32_t frameIndex,
    void RecordDestroyImage(uint32_t frameIndex,
    void RecordTouchAllocation(uint32_t frameIndex,
    void RecordGetAllocationInfo(uint32_t frameIndex,
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
    void RecordDefragmentationBegin(uint32_t frameIndex,
    void RecordDefragmentationEnd(uint32_t frameIndex,
    void RecordSetPoolName(uint32_t frameIndex,

    class UserDataString
    {
    public:
        const char* GetString() const { return m_Str; }

    VMA_MUTEX m_FileMutex;
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);

    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }
    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
};

#endif // #if VMA_RECORDING_ENABLED
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    template<typename... Types> VmaAllocation Allocate(Types... args);

private:
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
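/*
Illustration (not part of the library): the invariant behind
VmaCurrentBudgetData is m_AllocationBytes[heap] <= m_BlockBytes[heap],
since every allocation lives inside some VkDeviceMemory block; the
difference is unused space inside blocks. A hedged usage sketch:

    VmaCurrentBudgetData budget;
    budget.m_BlockBytes[0] += 1ull << 20;  // a 1 MiB block created in heap 0
    budget.AddAllocation(0, 65536);        // a 64 KiB allocation placed in it
    // ... later, in reverse:
    budget.RemoveAllocation(0, 65536);
    budget.m_BlockBytes[0] -= 1ull << 20;  // block destroyed
*/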
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation;
    bool m_UseKhrBindMemory2;
    bool m_UseExtMemoryBudget;
    bool m_UseAmdDeviceCoherentMemory;
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

    uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        VmaSuballocationType suballocType,
        size_t allocationCount,

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
    VkResult DefragmentationEnd(
    VkResult DefragmentationPassBegin(
    VkResult DefragmentationPassEnd(

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,

    VkResult BindBufferMemory(
        VkDeviceSize allocationLocalOffset,
    VkResult BindImageMemory(
        VkDeviceSize allocationLocalOffset,

    void FlushOrInvalidateAllocation(
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    uint32_t GetGpuDefragmentationMemoryTypeBits();

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;

    VMA_RW_MUTEX m_PoolsMutex;
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    uint32_t m_GlobalMemoryTypeBits;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,

    VkResult AllocateDedicatedMemoryPage(
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool isUserDataString,

    VkResult AllocateDedicatedMemory(
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool isUserDataString,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        size_t allocationCount,

    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

    uint32_t CalculateGlobalMemoryTypeBits() const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    ptr->~T();
    VmaFree(hAllocator, ptr);
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
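/*
Illustration (not part of the library): these helpers pair VmaMalloc/VmaFree
with explicit destructor calls, mirroring new[]/delete[] but routed through
the allocator's VkAllocationCallbacks. A hedged usage sketch with a
hypothetical trivially-constructible type:

    struct ExampleItem { uint32_t value; };   // hypothetical

    void ExampleSketch(VmaAllocator hAllocator)
    {
        // Raw storage; VmaAllocateArray does not run constructors.
        ExampleItem* items = VmaAllocateArray<ExampleItem>(hAllocator, 16);
        items[0].value = 42;
        // Runs destructors in reverse order, then frees the storage.
        vma_delete_array(hAllocator, items, 16);
    }
*/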
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char* p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    } while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char* p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    } while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
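/*
Illustration (not part of the library): AddNumber() above converts digits
least-significant first into the end of a stack buffer, so no reversal pass
is needed. A self-contained sketch of the same technique (hypothetical
helper, not library API):

    #include <stdint.h>

    const char* FormatU32Sketch(uint32_t num, char (&buf)[11])
    {
        buf[10] = '\0';
        char* p = &buf[10];
        do
        {
            *--p = '0' + (num % 10); // emit digits right to left
            num /= 10;
        } while(num);
        return p; // points at the first digit inside buf
    }
*/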
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(':'); m_SB.Add(' ');
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(','); m_SB.Add(' ');
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
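/*
Illustration (not part of the library): VmaJsonWriter pairs each Begin call
with an End call and alternates key/value strings inside objects. A hedged
usage sketch, assuming a valid VmaAllocator handle hAllocator; it produces
roughly { "Name": "block", "Size": 1024 }:

    VmaStringBuilder sb(hAllocator);
    {
        VmaJsonWriter json(hAllocator->GetAllocationCallbacks(), sb);
        json.BeginObject(true); // single-line object
        json.WriteString("Name");
        json.WriteString("block");
        json.WriteString("Size");
        json.WriteNumber(1024u);
        json.EndObject();
    }
    // sb.GetData() / sb.GetLength() now hold the JSON text.
*/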
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
            ++mapRefCount;
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
        break;
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                return true;
            }
        }
    }
}

#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}
void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}

bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations registered in m_FreeSuballocationsBySize.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}

VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}
void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request:
    // Early return false.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    it,
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else
        {
            // Search from the end - largest registered free suballocations first.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm: check every suballocation as a candidate start point.
        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}

uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation and update it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert a new free suballocation after this one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert a new free suballocation before this one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}

void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // If final offset is past the end of this suballocation, another start point is needed.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached, updating itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, more allocations must be made lost or the search fails.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margins is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, the allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. pOffset is already filled.
    return true;
}
9154 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
9156 VMA_ASSERT(item != m_Suballocations.end());
9157 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9159 VmaSuballocationList::iterator nextItem = item;
9161 VMA_ASSERT(nextItem != m_Suballocations.end());
9162 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
9164 item->size += nextItem->size;
9166 m_Suballocations.erase(nextItem);
VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)) {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin()) {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE) {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext) {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev) {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
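// The generic metadata keeps m_FreeSuballocationsBySize: a vector of iterators
// into the suballocation list, sorted by size, so a best-fit search can binary
// search it. Very small free ranges (below
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER) are deliberately left out to
// keep the vector small.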
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER) {
        if(m_FreeSuballocationsBySize.empty()) {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }
}
void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER) {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index) {
            if(m_FreeSuballocationsBySize[index] == item) {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty()) {
        return false;
    }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it) {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE) {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType)) {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}
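/*
VmaBlockMetadata_Linear keeps suballocations in two vectors instead of a list:
- suballocations1st grows toward higher addresses,
- suballocations2nd is used either as the upper side of a double stack
  (SECOND_VECTOR_DOUBLE_STACK) or as the wrapped-around part of a ring buffer
  (SECOND_VECTOR_RING_BUFFER).
Freed items are only marked as null (hAllocation == VK_NULL_HANDLE) and counted
in m_1stNullItemsBeginCount / m_1stNullItemsMiddleCount / m_2ndNullItemsCount;
actual removal is deferred to CleanupAfterFree(). m_1stVectorIndex selects which
of m_Suballocations0/1 currently plays the role of "1st".
*/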
VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_SumFreeSize(0),
    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_1stVectorIndex(0),
    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
    m_1stNullItemsBeginCount(0),
    m_1stNullItemsMiddleCount(0),
    m_2ndNullItemsCount(0)
{
}

VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
{
}

void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}
bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty()) {
        // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // Null item at the end should be just pop_back'ed.
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty()) {
        // Null item at the end should be just pop_back'ed.
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i) {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree) {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }
        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i) {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i) {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree) {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else {
            ++nullItem1stCount;
        }

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; ) {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree) {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }
        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}

VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();
    // Gaps left by freed allocations inside the vectors are not counted here,
    // because the linear allocator cannot reuse them for new allocations.
    if(IsEmpty()) {
        return size;
    }

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode)
    {
    case SECOND_VECTOR_EMPTY:
        // Available space: before the beginning and after the end of 1st.
        {
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }
    case SECOND_VECTOR_RING_BUFFER:
        // Available space: only between the end of 2nd and the beginning of 1st.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }
    case SECOND_VECTOR_DOUBLE_STACK:
        // Available space: only between the end of 1st and the top of 2nd.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }
    default:
        VMA_ASSERT(0);
        return 0;
    }
}
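// The statistics/printing functions below share one scanning pattern:
// 1) if the block is used as a ring buffer, walk suballocations2nd from offset
//    0 up to the first item of suballocations1st; 2) walk suballocations1st;
// 3) if used as a double stack, walk suballocations2nd backward from the top.
// lastOffset tracks the end of the previously visited allocation so gaps can
// be reported as unused ranges.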
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();
    // ... (outInfo fields are initialized here)
    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd) {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset) {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    // ... (unused-range statistics)
                }
                // ... (allocation statistics)
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else {
                // We are at the end.
                if(lastOffset < freeSpace2ndTo1stEnd) {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    // ... (unused-range statistics)
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd) {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE) {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount) {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset) {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                // ... (unused-range statistics)
            }
            // ... (allocation statistics)
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else {
            if(lastOffset < freeSpace1stTo2ndEnd) {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                // ... (unused-range statistics)
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size) {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset) {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    // ... (unused-range statistics)
                }
                // ... (allocation statistics)
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else {
                if(lastOffset < size) {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    // ... (unused-range statistics)
                }
                lastOffset = size;
            }
        }
    }
    // ...
}
void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
        while(lastOffset < freeSpace2ndTo1stEnd) {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset) {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    // ... (unused-range statistics)
                }
                // ... (allocation statistics)
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else {
                if(lastOffset < freeSpace2ndTo1stEnd) {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    // ... (unused-range statistics)
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd) {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE) {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount) {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset) {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                // ... (unused-range statistics)
            }
            // ... (allocation statistics)
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else {
            if(lastOffset < freeSpace1stTo2ndEnd) {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                // ... (unused-range statistics)
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size) {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset) {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    // ... (unused-range statistics)
                }
                // ... (allocation statistics)
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else {
                if(lastOffset < size) {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    // ... (unused-range statistics)
                }
                lastOffset = size;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: calculate totals.
    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;
    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd) {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset) {
                    ++unusedRangeCount;
                }
                ++alloc2ndCount;
                usedBytes += suballoc.size;
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else {
                if(lastOffset < freeSpace2ndTo1stEnd) {
                    ++unusedRangeCount;
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd) {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE) {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount) {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset) {
                ++unusedRangeCount;
            }
            ++alloc1stCount;
            usedBytes += suballoc.size;
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else {
            if(lastOffset < size) {
                ++unusedRangeCount;
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size) {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset) {
                    ++unusedRangeCount;
                }
                ++alloc2ndCount;
                usedBytes += suballoc.size;
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else {
                if(lastOffset < size) {
                    ++unusedRangeCount;
                }
                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);

    // SECOND PASS: print the ranges.
    lastOffset = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd) {
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset) {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else {
                if(lastOffset < freeSpace2ndTo1stEnd) {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd) {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE) {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount) {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset) {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else {
            if(lastOffset < freeSpace1stTo2ndEnd) {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size) {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset) {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else {
                if(lastOffset < size) {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED
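// Allocation requests are split by direction: upper-address requests serve
// VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT (the top of the double stack), and
// lower-address requests serve the end of the 1st vector or, when it no longer
// fits, wrap around to the end of the 2nd vector (ring-buffer mode).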
bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());
    return upperAddress ?
        CreateAllocationRequest_UpperAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
        CreateAllocationRequest_LowerAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}
bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
        return false;
    }

    // Try to allocate before 2nd.back(), or at the end of the block if 2nd is empty.
    if(allocSize > size) { return false; }
    VkDeviceSize resultBaseOffset = size - allocSize;
    if(!suballocations2nd.empty()) {
        const VmaSuballocation& lastSuballoc = suballocations2nd.back();
        resultBaseOffset = lastSuballoc.offset - allocSize;
        if(allocSize > lastSuballoc.offset) { return false; }
    }

    // Start from offset equal to end of free space.
    VkDeviceSize resultOffset = resultBaseOffset;

    // Apply VMA_DEBUG_MARGIN at the end.
    if(VMA_DEBUG_MARGIN > 0) {
        if(resultOffset < VMA_DEBUG_MARGIN) { return false; }
        resultOffset -= VMA_DEBUG_MARGIN;
    }

    // Apply alignment.
    resultOffset = VmaAlignDown(resultOffset, allocAlignment);

    // Check next suballocations from 2nd for BufferImageGranularity conflicts.
    // Increase alignment if necessary.
    if(bufferImageGranularity > 1 && !suballocations2nd.empty()) {
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; ) {
            const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
            if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) {
                if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType)) {
                    bufferImageGranularityConflict = true;
                    break;
                }
            }
            else {
                break; // Already on previous page.
            }
        }
        if(bufferImageGranularityConflict) {
            resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
        }
    }

    // There is enough free space.
    const VkDeviceSize endOf1st = !suballocations1st.empty() ?
        suballocations1st.back().offset + suballocations1st.back().size :
        0;
    if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset) {
        // Check previous suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1) {
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; ) {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) {
                    if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type)) {
                        return false;
                    }
                }
                else {
                    break; // Already on next page.
                }
            }
        }

        // All tests passed: success.
        pAllocationRequest->offset = resultOffset;
        pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
        pAllocationRequest->sumItemSize = 0;
        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
        return true;
    }

    return false;
}
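// Lower-address allocation: first try to append at the end of the 1st vector.
// If there is not enough room before the block end (or before 2nd.back() in
// double-stack mode), optionally wrap around and append at the end of the 2nd
// vector, making colliding allocations from the beginning of 1st lost when
// canMakeOtherLost allows it.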
bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
        // Try to allocate at the end of 1st vector.
        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty()) {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0) { resultOffset += VMA_DEBUG_MARGIN; }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations1st.empty()) {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; ) {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else {
                    break; // Already on previous page.
                }
            }
            if(bufferImageGranularityConflict) {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd) {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; ) {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) {
                            return false;
                        }
                    }
                    else {
                        break; // Already on previous page.
                    }
                }
            }

            // All tests passed: success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }

    // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
    // beginning of 1st vector as the end of free space.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty()) {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0) { resultOffset += VMA_DEBUG_MARGIN; }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty()) {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; ) {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else {
                    break; // Already on previous page.
                }
            }
            if(bufferImageGranularityConflict) {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost) {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset) {
                // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE) {
                    // No problem with this free range.
                }
                else {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex) {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else {
                        return false;
                    }
                }
                ++index1st;
            }

            // Check next suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, more allocations must be made lost or the request fails.
            if(bufferImageGranularity > 1) {
                while(index1st < suballocations1st.size()) {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity)) {
                        if(suballoc.hAllocation != VK_NULL_HANDLE) {
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex) {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else {
                                return false;
                            }
                        }
                    }
                    else {
                        break; // Already on next page.
                    }
                    ++index1st;
                }
            }

            // Special case: there is not enough room at the end for this allocation, even after making all from 1st lost.
            if(index1st == suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > size) {
                VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
                return false;
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset)) {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1) {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++) {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) {
                            return false;
                        }
                    }
                    else {
                        break; // Already on next page.
                    }
                }
            }

            // All tests passed: success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
            return true;
        }
    }

    return false;
}
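// "Lost allocations" support: a request produced with canMakeOtherLost may
// require evicting older allocations first. MakeRequestedAllocationsLost walks
// the vectors from the beginning of 1st (wrapping into 2nd in ring-buffer
// mode) and marks items lost until itemsToMakeLostCount is satisfied.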
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0) {
        return true;
    }

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    // We always start from 1st.
    SuballocationVectorType* suballocations = &AccessSuballocations1st();
    size_t index = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount) {
        if(index == suballocations->size()) {
            index = 0;
            // If we get to the end of 1st, we wrap around to beginning of 2nd.
            if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
                suballocations = &AccessSuballocations2nd();
            }
            // else: in SECOND_VECTOR_EMPTY mode, suballocations keeps pointing at 1st.
            VMA_ASSERT(!suballocations->empty());
        }
        VmaSuballocation& suballoc = (*suballocations)[index];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount)) {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                if(suballocations == &AccessSuballocations1st()) {
                    ++m_1stNullItemsMiddleCount;
                }
                else {
                    ++m_2ndNullItemsCount;
                }
                ++madeLostCount;
            }
            else {
                return false;
            }
        }
        ++index;
    }

    CleanupAfterFree();
    return true;
}
uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i) {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount)) {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i) {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount)) {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount) {
        CleanupAfterFree();
    }

    return lostAllocationCount;
}
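// CheckCorruption assumes VMA_DEBUG_MARGIN and VMA_DEBUG_DETECT_CORRUPTION are
// enabled: every allocation is surrounded by magic values, so scanning both
// vectors and validating the bytes just before and just after each live
// allocation detects out-of-bounds writes.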
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i) {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN)) {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size)) {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i) {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN)) {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size)) {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
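// Alloc commits a previously computed request. The request type decides where
// the new suballocation is appended: the 2nd vector as a double stack
// (UpperAddress), the end of the 1st vector (EndOf1st), or the 2nd vector as a
// ring buffer (EndOf2nd), switching m_2ndVectorMode accordingly.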
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type)
    {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());
            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector.
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode)
            {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}

void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty()) {
        // First allocation: mark it as next empty item at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset) {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset) {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY) {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset) {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaBinaryFindSorted(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc,
            VmaSuballocationOffsetLess());
        if(it != suballocations1st.end()) {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY) {
        // Item from the middle of 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
        if(it != suballocations2nd.end()) {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty()) {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE) {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE) {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE) {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE) {
            --m_2ndNullItemsCount;
            VmaVectorRemove(suballocations2nd, 0);
        }

        if(ShouldCompact1st()) {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex) {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE) {
                    ++srcIndex;
                }
                if(dstIndex != srcIndex) {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty()) {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0) {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE) {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}
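/*
VmaBlockMetadata_Buddy implements a classic buddy allocator: the usable size is
the block size rounded down to a power of two (VmaPrevPow2), and the block is
described by a binary tree where each split node has two children of half its
size. Free nodes of each level are linked into m_FreeList[level], so an
allocation of a given size only scans the free list of the matching level.
For example, in a 256 MB block, level 0 is the whole 256 MB node, level 1 holds
128 MB nodes, level 2 holds 64 MB nodes, and so on down to MIN_NODE_SIZE.
*/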
VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}

VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}

void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE) {
        ++m_LevelCount;
    }

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}
bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0))) {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level) {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);
        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next) {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);
            if(node->free.next == VMA_NULL) {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists at higher levels are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level) {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}

VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level) {
        if(m_FreeList[level].front != VMA_NULL) {
            return LevelToNodeSize(level);
        }
    }
    return 0;
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();
    // ... (outInfo fields are initialized here)
    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0) {
        // ... (the unusable tail of the block is counted as an unused range)
    }
}

void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    // ... (allocation and unused-range counts)
    if(unusableSize > 0) {
        // ... (the unusable tail of the block is counted as an unused range)
    }
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0) {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity whenever the allocation
    // might be an OPTIMAL image: pad alignment and size up to the granularity.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL) {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize) {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; ) {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next) {
            if(freeNode->offset % allocAlignment == 0) {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    // Lost allocations are not supported in the buddy allocator.
    return pAllocationRequest->itemsToMakeLostCount == 0;
}

uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    // Lost allocations are not supported in the buddy allocator.
    return 0;
}

void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset) {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down the tree, splitting free nodes until the target level is reached.
    while(currLevel < targetLevel) {
        // currNode is the first free node at currLevel; remove it from the free list.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert the current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to the free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
    }

    // Remove from the free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT) {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }
    vma_delete(GetAllocationCallbacks(), node);
}

bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);

    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev and next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize)) {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize)) {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    // Find the deepest level whose node size still fits allocSize.
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount) {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
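// FreeAtOffset descends from the root, choosing the left or right child by
// comparing the offset against the midpoint of the current node, until it
// reaches the allocation node. After marking it free it repeatedly merges the
// node with its buddy while the buddy is also free, climbing toward the root.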
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find the node and its level.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT) {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize) {
            node = node->split.leftChild;
        }
        else {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free nodes (buddies) if possible.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE) {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        // ... (the whole node is counted as an unused range)
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            // ... (allocation statistics)
            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0) {
                // ... (internal fragmentation counted as an unused range)
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
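// Free nodes of one level form an intrusive doubly-linked list through
// node->free.prev/next, with m_FreeList[level].front/back as the end pointers.
// AddToFreeListFront pushes at the front; RemoveFromFreeList unlinks a node
// from the middle or either end.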
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    // List is empty.
    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL) {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}

void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL) {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL) {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize) {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}

void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}

void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}

bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}

VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS) {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
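// Map/Unmap are reference counted: the block is mapped with vkMapMemory only
// when m_MapCount goes from 0 to nonzero, later calls just bump the counter and
// return the cached m_pMappedData, and vkUnmapMemory runs only when the counter
// returns to 0. The same m_Mutex also guards binding, since vkMapMemory and
// vkBind*Memory must not race on one VkDeviceMemory.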
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        // Already mapped - just bump the reference count.
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}
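/*
Note: Map()/Unmap() reference-count the mapping per block, so several
allocations living in the same VkDeviceMemory can be "mapped" concurrently
while vkMapMemory is called only once. A minimal sketch, assuming a block `b`
and allocator `a`:

    void* p1; b->Map(a, 1, &p1);   // actually calls vkMapMemory
    void* p2; b->Map(a, 1, &p2);   // p2 == p1, just bumps m_MapCount to 2
    b->Unmap(a, 1);                // m_MapCount back to 1, still mapped
    b->Unmap(a, 1);                // m_MapCount 0 -> vkUnmapMemory
*/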
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}

VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
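/*
Note: with corruption detection enabled, every suballocation is padded by a
VMA_DEBUG_MARGIN-sized guard band on each side, and a 4-byte magic value is
stamped at the start of each band:

    [magic @ allocOffset - VMA_DEBUG_MARGIN] [user data, allocOffset..allocOffset+allocSize) [magic @ allocOffset + allocSize]

A write that overruns either end of the allocation tramples a magic value,
which ValidateMagicValueAroundAllocation reports at free/check time.
*/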
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}

VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
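/*
Note: the Vulkan spec requires external synchronization of
vkBindBufferMemory / vkBindImageMemory / vkMapMemory with respect to the
VkDeviceMemory they touch. Serializing them on the per-block m_Mutex is what
lets suballocations from the same block be created, bound, and mapped from
multiple threads safely.
*/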
static void InitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}

VmaPool_T::~VmaPool_T()
{
}

void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}

#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}

VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}
bool VmaBlockVector::IsEmpty()
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    return m_Blocks.empty();
}

bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;

VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
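/*
Note: Allocate() is all-or-nothing for multi-page requests: pages are
allocated one by one under the write lock, and if any page fails, every page
allocated so far is freed and the output array is zeroed before the error is
returned, so the caller never observes a partially filled pAllocations array.
*/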
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // If linearAlgorithm is used, canMakeOtherLost is available only when used as ring buffer,
    // which in turn is available only when maxBlockCount = 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    if(strategy != 0 &&
        strategy != VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT &&
        strategy != VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT &&
        strategy != VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    /*
    Under certain conditions this whole section can be skipped for optimization, so
    we move on directly to trying to allocate with canMakeOtherLost. That is the case
    e.g. for custom pools with linear algorithm.
    */
    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock, currentFrameIndex, size, alignment,
                    allocFlagsCopy, createInfo.pUserData, suballocType, strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock, currentFrameIndex, size, alignment,
                        allocFlagsCopy, createInfo.pUserData, suballocType, strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock, currentFrameIndex, size, alignment,
                        allocFlagsCopy, createInfo.pUserData, suballocType, strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
        }

        // 2. Try to create new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock, currentFrameIndex, size, alignment,
                    allocFlagsCopy, createInfo.pUserData, suballocType, strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }
    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // Allocate from this block.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    UpdateHasEmptyBlock();
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment,
                        size,
                        m_MemoryTypeIndex,
                        suballocType,
                        mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations marked for making lost were only partially made lost - retry.
            }
            else
            {
                // Could not find a place for this allocation in any of the blocks.
                break;
            }
        }
        /* Maximum number of tries exceeded - a very unlikely situation. Happens when
        all allocations are lost but could not actually be freed due to frameInUseCount > 0. */
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
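/*
Note: AllocatePage tries three strategies in order: (1) suballocate from an
existing block, honoring the requested strategy (best-fit scans blocks with
the least free space first, worst/first-fit scan the opposite way); (2)
create a new VkDeviceMemory block, preferring 1/8 -> 1/4 -> 1/2 of the
preferred block size while the pool is still small and retrying with halved
sizes if vkAllocateMemory fails; (3) if allowed, make "lost" allocations lost
for real and retry, up to VMA_ALLOCATION_TRY_COUNT times.
*/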
void VmaBlockVector::Free(const VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already had an empty block, or over budget - we don't want two, delete this one.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // else: keep this one empty block as a cache.
        }
        // pBlock didn't become empty, but we have another empty block - find and free that one.
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
            }
        }

        UpdateHasEmptyBlock();
        IncrementallySortBlocks();
    }

    // Destruction of a free block. Deferred until this point, outside of mutex lock,
    // for performance reasons.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
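/*
Note: the empty block is only detached under the mutex; its Destroy() (the
vkFreeMemory call) happens after the lock is released, so other threads
allocating from this vector are not stalled behind a driver call. At most one
empty block is kept alive as a cache; a second one is freed immediately, and
under budget pressure even the single cached one goes.
*/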
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}

void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}

void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
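/*
Note: IncrementallySortBlocks performs one bubble-sort pass that stops at the
first swap, amortizing the cost of keeping m_Blocks ordered by ascending sum
of free space across many Free() calls. Full order is not required for
correctness; it only biases the best-fit search toward fuller blocks.
*/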
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex,
        m_FrameInUseCount,
        m_BufferImageGranularity,
        size,
        alignment,
        isUpperAddress,
        suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // Allocate from pBlock. Because requested size is not larger than block size,
        // this allocation must succeed.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        UpdateHasEmptyBlock();
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment,
            size,
            m_MemoryTypeIndex,
            suballocType,
            mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;
    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created. Create new block object for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
            memmove(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
    // Regardless of pDefragCtx->res.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
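/*
Note: the CPU path moves data with a plain memmove between mapped pointers.
For non-coherent memory types each move is bracketed by
vkInvalidateMappedMemoryRanges on the source and vkFlushMappedMemoryRanges on
the destination, with both ranges expanded to nonCoherentAtomSize boundaries
and clamped to the block size, as the spec requires.
*/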
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind buffer for whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Save buffers to defrag context for later destruction.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}

void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                break;
            }
        }
    }
    UpdateHasEmptyBlock();
}
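/*
Note: the GPU path cannot memmove, so for every block touched by a move it
creates a temporary VkBuffer spanning the whole block, binds it at offset 0,
and records vkCmdCopyBuffer regions into the user-provided command buffer.
The context result is left at VK_NOT_READY until the caller has submitted
that command buffer and ends defragmentation, at which point the temporary
buffers are destroyed in DefragmentationEnd.
*/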
void VmaBlockVector::UpdateHasEmptyBlock()
{
    m_HasEmptyBlock = false;
    for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = true;
            break;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
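/*
Illustrative sketch (not emitted verbatim): for a custom pool, the object
written by PrintDetailedMap above has roughly this shape:

    {
      "Name": "MyPool", "MemoryTypeIndex": 2, "BlockSize": 1048576,
      "BlockCount": { "Min": 1, "Max": 16, "Cur": 3 },
      "Algorithm": "Linear",
      "Blocks": { "0": { ...per-block detailed map... }, ... }
    }

Default pools write "PreferredBlockSize" instead of the custom-pool fields.
*/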
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
            {
                if(!m_Mutex.TryLockWrite())
                {
                    pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
                    return;
                }
            }
            else
            {
                m_Mutex.LockWrite();
                pCtx->mutexLocked = true;
            }
        }

        pCtx->Begin(overlappingMoveSupported, flags);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
        {
            if(m_hAllocator->m_UseMutex)
                m_Mutex.UnlockWrite();

            if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
                pCtx->res = VK_NOT_READY;

            return;
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
            }
        }
    }
}
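/*
Note: when both CPU and GPU defragmentation are possible for a memory type,
the heuristic above prefers the GPU path for DEVICE_LOCAL memory and on
integrated GPUs (where device-local and host-visible memory are usually the
same physical memory), and the CPU path otherwise. Overlapping moves are only
allowed on the CPU path, since memmove handles overlap while vkCmdCopyBuffer
regions must not overlap.
*/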
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats)
{
    // Destroy buffers.
    for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
    {
        VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
        if(blockCtx.hBuffer)
        {
            (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
                m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
        }
    }

    if(pCtx->res >= VK_SUCCESS)
    {
        FreeEmptyBlocks(pStats);
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
uint32_t VmaBlockVector::ProcessDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    const uint32_t moveCount = std::min(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);

    for(uint32_t i = 0; i < moveCount; ++ i)
    {
        VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];

        pMove->allocation = move.hAllocation;
        pMove->memory = move.pDstBlock->GetDeviceMemory();
        pMove->offset = move.dstOffset;

        ++ pMove;
    }

    pCtx->defragmentationMovesProcessed += moveCount;

    return moveCount;
}

void VmaBlockVector::CommitDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationStats* pStats)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
    {
        const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];

        move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
        move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
    }

    pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
    FreeEmptyBlocks(pStats);
}
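/*
Note: this pair implements the incremental protocol. ProcessDefragmentations
hands out up to maxMoves pending moves as VmaDefragmentationPassMoveInfo
entries (allocation, destination memory, destination offset) for the user to
copy; CommitDefragmentations later frees the source ranges, repoints the
allocations at their destination blocks, and releases blocks that became
empty.
*/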
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}

bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }

    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}

VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    bool freeOldAllocations)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    size_t srcBlockMinIndex = 0;

    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find a new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT, // strategy
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move = {};
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                move.hAllocation = allocInfo.m_hAllocation;
                move.pSrcBlock = pSrcBlockInfo->m_pBlock;
                move.pDstBlock = pDstBlockInfo->m_pBlock;

                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);

                if(freeOldAllocations)
                {
                    pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
                    allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
                }

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pBlockInfo->m_Allocations for next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        // This is a choice based on research.
        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks this time by the main criterion, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // This is a choice based on research.
    const uint32_t roundCount = 2;

    // Execute defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove, !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL));
    }

    return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
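/*
Note: a move "makes sense" only if it strictly lowers the allocation's
position in the (block index, offset) lexicographic order. Since blocks are
sorted from most "destination" to most "source", this drives all data toward
the front blocks and guarantees that a round cannot undo the work of a
previous one.
*/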
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}

VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most destination.

    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
    });

    // THE MAIN ALGORITHM

    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;

    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            VmaDefragmentationMove move = {};
            // Try to place it in one of free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register remaining free space at the end of dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If destination and source place overlap, skip if it would move it
                        // by only < 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;

                        move.srcBlockIndex = srcOrigBlockIndex;
                        move.dstBlockIndex = dstOrigBlockIndex;
                        move.srcOffset = srcAllocOffset;
                        move.dstOffset = dstAllocOffset;
                        move.size = srcAllocSize;

                        moves.push_back(move);
                    }
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = dstOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
        }
    }

    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
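/*
Note: the fast algorithm is a single compaction sweep. Blocks are sorted by
ascending free space, and the sweep walks them from most full to most free,
packing each allocation either into a free gap remembered in FreeSpaceDatabase
or at the running dstOffset cursor. It edits the generic metadata's
suballocation lists directly, hence the Preprocess/PostprocessMetadata pair
below, which strips FREE entries before the sweep and afterwards rebuilds
them together with m_FreeCount, m_SumFreeSize, and the by-size index. It
requires that every allocation is movable and that VMA_DEBUG_MARGIN == 0.
*/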
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
    // Linear search for the first element with a greater or equal offset.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end())
    {
        if(it->offset < suballoc.offset)
        {
            ++it;
        }
        else
        {
            break;
        }
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
    defragmentationMovesProcessed(0),
    defragmentationMovesCommitted(0),
    hasDefragmentationPlan(0),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}

VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}

void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    /*
    Fast algorithm is supported only when certain criteria are met:
    - VMA_DEBUG_MARGIN is 0;
    - all allocations in this block vector are movable;
    - there is no possibility of image/buffer granularity conflict;
    - the defragmentation is not incremental.
    */
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
        !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL))
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}

VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with an algorithm other than default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentation contexts. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // Dedicated allocations cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // Lost allocations cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with an algorithm other than default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to a default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags)
{
    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
    {
        // For incremental defragmentation only remember the limits here.
        // The real work happens in DefragmentPassBegin / DefragmentPassEnd.
        m_MaxCpuBytesToMove = maxCpuBytesToMove;
        m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
        m_MaxGpuBytesToMove = maxGpuBytesToMove;
        m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;

        if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
            m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
        {
            return VK_SUCCESS;
        }
        return VK_NOT_READY;
    }

    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats, flags,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats, flags,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}

VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo)
{
    VmaDefragmentationPassMoveInfo* pCurrentMove = pInfo->pMoves;
    uint32_t movesLeft = pInfo->moveCount;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                pBlockVectorCtx->GetBlockVector()->Defragment(
                    pBlockVectorCtx,
                    m_pStats, m_Flags,
                    m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                    m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                    VK_NULL_HANDLE);

                if(pBlockVectorCtx->res < VK_SUCCESS)
                    continue;

                pBlockVectorCtx->hasDefragmentationPlan = true;
            }

            const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
                pBlockVectorCtx,
                pCurrentMove, movesLeft);

            movesLeft -= processed;
            pCurrentMove += processed;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                m_pStats, m_Flags,
                m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                VK_NULL_HANDLE);

            if(pBlockVectorCtx->res < VK_SUCCESS)
                continue;

            pBlockVectorCtx->hasDefragmentationPlan = true;
        }

        const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
            pBlockVectorCtx,
            pCurrentMove, movesLeft);

        movesLeft -= processed;
        pCurrentMove += processed;
    }

    pInfo->moveCount = pInfo->moveCount - movesLeft;

    return VK_SUCCESS;
}
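// A hedged sketch of the incremental defragmentation loop these methods serve
// (illustration only): with VMA_DEFRAGMENTATION_FLAG_INCREMENTAL the caller repeats
// passes until all planned moves are committed. `allocator` and `defragCtx` are
// assumed to come from a prior vmaDefragmentationBegin call.
//
//     VmaDefragmentationPassMoveInfo moves[64];
//     for(;;)
//     {
//         VmaDefragmentationPassInfo passInfo = {};
//         passInfo.moveCount = 64;
//         passInfo.pMoves = moves;
//         vmaBeginDefragmentationPass(allocator, defragCtx, &passInfo);
//         // Copy the passInfo.moveCount reported allocations to their new places here...
//         if(vmaEndDefragmentationPass(allocator, defragCtx) == VK_SUCCESS)
//             break; // All moves committed; defragmentation is complete.
//         // VK_NOT_READY: more passes are needed.
//     }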
VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
{
    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                res = VK_NOT_READY;
                continue;
            }

            pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
                pBlockVectorCtx, m_pStats);

            if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
                res = VK_NOT_READY;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            res = VK_NOT_READY;
            continue;
        }

        pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
            pBlockVectorCtx, m_pStats);

        if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
            res = VK_NOT_READY;
    }

    return res;
}
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_Freq(INT64_MAX),
    m_StartCounter(INT64_MAX)
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,8");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}

void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}

void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}

void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, offset, size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, offset, size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}

void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    PrintPointerList(info.poolCount, info.pPools);
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
    Flush();
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}

void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
    Flush();
}

VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            sprintf_s(m_PtrStr, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled,
    bool deviceCoherentMemoryExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
    outParams.threadId = GetCurrentThreadId();

    LARGE_INTEGER counter;
    QueryPerformanceCounter(&counter);
    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
}
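// Timestamps are derived from QueryPerformanceCounter: with frequency f ticks per
// second captured at Init() and start counter c0, a call observed at counter c is
// logged as (c - c0) / f, i.e. seconds elapsed since recording started, printed
// with millisecond precision (%.3f).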
void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}

void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
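// Note on the design: VmaAllocation_T objects are small and very numerous, so they
// come from a pool allocator that carves them out of blocks of 1024 items (the
// `1024` passed to m_Allocator above) instead of hitting the allocation callbacks
// once per object. The mutex makes Allocate()/Free() safe to call from multiple
// threads unless the allocator is externally synchronized.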
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
    m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
    m_UseAmdDeviceCoherentMemory((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0),
    m_GlobalMemoryTypeBits(UINT32_MAX)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        // These extensions are promoted to core Vulkan 1.1.
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Needs to be a multiple of uint32_t size because VMA_CORRUPTION_DETECTION_MAGIC_VALUE is written to it.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0,
            SIZE_MAX,
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call CreateMinBlocks here, because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}

VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget,
            m_UseAmdDeviceCoherentMemory);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2");
        m_VulkanFunctions.vkBindBufferMemory2KHR =
            (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2");
        m_VulkanFunctions.vkBindImageMemory2KHR =
            (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2");
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
            (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    }
#endif
#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        m_VulkanFunctions.vkBindBufferMemory2KHR =
            (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2KHR");
        m_VulkanFunctions.vkBindImageMemory2KHR =
            (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2KHR");
    }
#endif // #if VMA_BIND_MEMORY2
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget && m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
            (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif // #if VMA_MEMORY_BUDGET
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
        VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif
#if VMA_MEMORY_BUDGET
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif
    }

#undef VMA_COPY_IF_NOT_NULL
    // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    // or pass valid function pointers in VmaAllocatorCreateInfo::pVulkanFunctions.
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
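// Worked example of the heuristic above, assuming the default
// VMA_SMALL_HEAP_MAX_SIZE of 1 GiB and VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE of 256 MiB:
// a 512 MiB heap counts as "small", so its preferred block size is
// 512 MiB / 8 = 64 MiB; an 8 GiB heap is "large" and simply uses
// m_PreferredLargeHeapBlockSize (256 MiB unless overridden via
// VmaAllocatorCreateInfo::preferredLargeHeapBlockSize). The VmaAlignUp to 32 bytes
// only matters for exotic heap sizes that are not already multiples of 32.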
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristics: allocate dedicated memory if requested size is greater than half of preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        return AllocateDedicatedMemory(
            size,
            suballocType,
            memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer,
            dedicatedImage,
            allocationCount,
            pAllocations);
    }
    else
    {
        VkResult res = blockVector->Allocate(
            m_CurrentFrameIndex.load(),
            size,
            alignment,
            finalCreateInfo,
            suballocType,
            allocationCount,
            pAllocations);
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Block allocation failed: try dedicated memory as a fallback.
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        res = AllocateDedicatedMemory(
            size,
            suballocType,
            memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer,
            dedicatedImage,
            allocationCount,
            pAllocations);
        if(res == VK_SUCCESS)
        {
            // Succeeded: AllocateDedicatedMemory already filled pAllocations.
            VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
            return VK_SUCCESS;
        }
        // Everything failed: return error code.
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size,
            suballocType,
            memTypeIndex,
            allocInfo,
            map,
            isUserDataString,
            pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register them in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all already created allocations.
        // No need to call vkUnmapMemory: the spec allows freeing memory that is still mapped.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}

void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfoForPool,
            suballocType,
            allocationCount,
            pAllocations);
    }
    else
    {
        // Bit mask of memory Vulkan types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer,
                dedicatedImage,
                createInfo,
                memTypeIndex,
                suballocType,
                allocationCount,
                pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed. Try other compatible memory types.
            for(;;)
            {
                // Remove old memTypeIndex from list of possibilities.
                memoryTypeBits &= ~(1u << memTypeIndex);
                // Find alternative memTypeIndex.
                res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                if(res != VK_SUCCESS)
                {
                    // No other matching memory type index could be found.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }

                alignmentForMemType = VMA_MAX(
                    vkMemReq.alignment,
                    GetMemoryTypeMinAlignment(memTypeIndex));

                res = AllocateMemoryOfType(
                    vkMemReq.size,
                    alignmentForMemType,
                    requiresDedicatedAllocation || prefersDedicatedAllocation,
                    dedicatedBuffer,
                    dedicatedImage,
                    createInfo,
                    memTypeIndex,
                    suballocType,
                    allocationCount,
                    pAllocations);
                // Allocation from this alternative memory type succeeded.
                if(res == VK_SUCCESS)
                {
                    return res;
                }
                // else: allocation from this memory type failed too - try the next one in the next iteration.
            }
        }
        // Can't find any single memory type matching requirements - return error code.
        return res;
    }
}
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            // Do this regardless of whether the allocation is lost.
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}

VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and so it does nothing. It's left for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // Have to take MIN with heap size because explicit HeapSizeLimit is included in it.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics.
        }
    }
}
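// In other words: with VK_EXT_memory_budget the numbers come from the driver
// (refetched via UpdateVulkanBudget() after at most 30 allocation operations,
// with usage extrapolated by the block bytes VMA itself allocated or freed since
// the last fetch); without the extension, usage is approximated by VMA's own
// block bytes and the budget by 80% of the heap size, e.g. an 8 GiB heap is
// assumed to have a ~6.4 GiB budget.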
static const uint32_t VMA_VENDOR_ID_AMD = 4098;

VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats, info.flags);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}

VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}

VkResult VmaAllocator_T::DefragmentationPassBegin(
    VmaDefragmentationPassInfo* pInfo,
    VmaDefragmentationContext context)
{
    return context->DefragmentPassBegin(pInfo);
}

VkResult VmaAllocator_T::DefragmentationPassEnd(
    VmaDefragmentationContext context)
{
    return context->DefragmentPassEnd();
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        // No mutex is needed here: the last-use frame index is maintained with an
        // atomic compare-exchange loop.
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}

bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}
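// The compare-exchange loops above implement lost-allocation bookkeeping without
// a mutex: TouchAllocation attempts to advance the allocation's last-use frame
// index to the current frame and reports whether the allocation is still alive,
// retrying only when another thread raced it to a different frame index.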
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    // Memory type index out of range or forbidden.
    if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() ||
        ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}

void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}

void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}

void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}

void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}

VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
void VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize allocationSize = hAllocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;

        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
        memRange.memory = hAllocation->GetMemory();

        switch(hAllocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                memRange.size = allocationSize - memRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
                    allocationSize - memRange.offset);
            }
            break;

        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            // 1. Clamp and align the range within this allocation.
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                size = allocationSize - offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
            }
            memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);

            // 2. Adjust to the whole block and clamp to its end.
            const VkDeviceSize allocationOffset = hAllocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
            memRange.offset += allocationOffset;
            memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
            break;
        }

        default:
            VMA_ASSERT(0);
        }

        switch(op)
        {
        case VMA_CACHE_FLUSH:
            (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Memory is coherent or size == 0 - just ignore this call.
}
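// Worked example of the rounding performed above, assuming nonCoherentAtomSize = 64
// (an illustrative value, not taken from any particular device):
// offset = 100, size = 200 =>
//   memRange.offset = VmaAlignDown(100, 64) = 64,
//   memRange.size   = VmaAlignUp(200 + (100 - 64), 64) = VmaAlignUp(236, 64) = 256.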
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    // No vkUnmapMemory is needed here - the Vulkan spec allows freeing memory that is still mapped.

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create a dummy buffer...
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // ...query which memory types can back it...
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // ...and destroy it again.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}

uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
{
    // Make sure memory information is already fetched.
    VMA_ASSERT(GetMemoryTypeCount() > 0);

    uint32_t memoryTypeBits = UINT32_MAX;

    if(!m_UseAmdDeviceCoherentMemory)
    {
        // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD.
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
            {
                memoryTypeBits &= ~(1u << memTypeIndex);
            }
        }
    }

    return memoryTypeBits;
}
#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    memProps.pNext = &budgetProps;

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET
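// Illustrative sketch of how an application consumes this data through the
// public API (not part of the library; 'g_Allocator' is a placeholder handle).
#if 0
VmaBudget budgets[VK_MAX_MEMORY_HEAPS] = {};
vmaGetBudget(g_Allocator, budgets);
// When VK_EXT_memory_budget is in use, budgets[heapIndex].usage and
// budgets[heapIndex].budget come from the driver via UpdateVulkanBudget();
// otherwise they are estimated from heap sizes and internal counters.
#endif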
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}

uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            json.WriteString("BlockBytes");
            json.WriteNumber(budget[heapIndex].blockBytes);
            json.WriteString("AllocationBytes");
            json.WriteNumber(budget[heapIndex].allocationBytes);
            json.WriteString("Usage");
            json.WriteNumber(budget[heapIndex].usage);
            json.WriteString("Budget");
            json.WriteNumber(budget[heapIndex].budget);
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();
                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
                    {
                        json.WriteString("PROTECTED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_UNCACHED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
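// Illustrative usage sketch (not part of the library): dumping the JSON
// statistics to inspect memory usage offline. 'g_Allocator' is a placeholder.
#if 0
char* statsString = VMA_NULL;
vmaBuildStatsString(g_Allocator, &statsString, VK_TRUE); // VK_TRUE = include detailed map
// ... write statsString to a file or log ...
vmaFreeStatsString(g_Allocator, statsString);
#endif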
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    // Avoid DEVICE_COHERENT memory unless it was explicitly requested.
    if(((pAllocationCreateInfo->requiredFlags | pAllocationCreateInfo->preferredFlags) &
        (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
    {
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains all requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Cost = number of preferred flags missing + number of not-preferred flags present.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
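// Illustrative sketch of the cost-based selection above (not part of the
// library; 'g_Allocator' is a placeholder). Passing UINT32_MAX as
// memoryTypeBits means "any memory type"; normally the mask comes from
// VkMemoryRequirements::memoryTypeBits.
#if 0
VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
uint32_t memTypeIndex = UINT32_MAX;
VkResult res = vmaFindMemoryTypeIndex(g_Allocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
#endif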
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
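// Illustrative sketch (not part of the library): using the buffer-info variant
// above to obtain a memory type index, e.g. to fill
// VmaPoolCreateInfo::memoryTypeIndex. 'g_Allocator' is a placeholder.
#if 0
VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufCreateInfo.size = 1024;
bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

uint32_t memTypeIndex = UINT32_MAX;
VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
    g_Allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);
#endif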
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
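// Illustrative sketch of the custom-pool lifecycle (not part of the library;
// 'g_Allocator' and 'memTypeIndex' are placeholders).
#if 0
VmaPoolCreateInfo poolCreateInfo = {};
poolCreateInfo.memoryTypeIndex = memTypeIndex; // e.g. from vmaFindMemoryTypeIndexForBufferInfo
poolCreateInfo.blockSize = 64ull * 1024 * 1024;
poolCreateInfo.maxBlockCount = 2;

VmaPool pool = VK_NULL_HANDLE;
VkResult res = vmaCreatePool(g_Allocator, &poolCreateInfo, &pool);
// Allocations are then routed to it via VmaAllocationCreateInfo::pool.
// When all allocations from the pool are freed:
vmaDestroyPool(g_Allocator, pool);
#endif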
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
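// Illustrative sketch (not part of the library): allocating memory for an
// externally created buffer and binding it in a separate step. 'g_Allocator'
// and 'buffer' are placeholders.
#if 0
VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VmaAllocation allocation = VK_NULL_HANDLE;
VkResult res = vmaAllocateMemoryForBuffer(g_Allocator, buffer, &allocCreateInfo, &allocation, VMA_NULL);
if(res == VK_SUCCESS)
{
    res = vmaBindBufferMemory(g_Allocator, allocation, buffer);
}
#endif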
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
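// Illustrative map/write/flush/unmap sequence (not part of the library;
// 'g_Allocator', 'allocation', 'srcData' and 'srcSize' are placeholders).
// The flush is a no-op for HOST_COHERENT memory types.
#if 0
void* pData = VMA_NULL;
if(vmaMapMemory(g_Allocator, allocation, &pData) == VK_SUCCESS)
{
    memcpy(pData, srcData, srcSize);
    vmaFlushAllocation(g_Allocator, allocation, 0, VK_WHOLE_SIZE);
    vmaUnmapMemory(g_Allocator, allocation);
}
#endif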
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented on top of the VmaDefragmentationInfo2-based one.
    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // info2.flags, maxGpuAllocationsToMove, maxGpuBytesToMove and commandBuffer are left zero.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context,
    VmaDefragmentationPassInfo* pInfo)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(pInfo);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->moveCount, pInfo->pMoves));

    VMA_DEBUG_LOG("vmaBeginDefragmentationPass");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        pInfo->moveCount = 0;
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassBegin(pInfo, context);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaEndDefragmentationPass");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassEnd(context);
}
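// Illustrative sketch of CPU-side defragmentation (not part of the library;
// 'g_Allocator' and 'allocations' are placeholders). Buffers or images bound
// to moved allocations must be destroyed and recreated by the caller.
#if 0
VmaDefragmentationInfo2 defragInfo = {};
defragInfo.allocationCount = (uint32_t)allocations.size();
defragInfo.pAllocations = allocations.data();
defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
VkResult res = vmaDefragmentationBegin(g_Allocator, &defragInfo, VMA_NULL, &defragCtx);
// ... recreate resources bound to moved allocations ...
vmaDefragmentationEnd(g_Allocator, defragCtx);
#endif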
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
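// Illustrative sketch (not part of the library): the *2 variants accept a
// non-zero local offset and a pNext chain, which requires VK_KHR_bind_memory2
// (or Vulkan 1.1) to have been enabled on the allocator. Placeholders:
// 'g_Allocator', 'allocation', 'buffer'.
#if 0
VkResult res = vmaBindBufferMemory2(
    g_Allocator, allocation, /*allocationLocalOffset=*/0, buffer, /*pNext=*/VMA_NULL);
#endif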
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in the alignment reported by memory requirements.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
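// Illustrative sketch of the common one-call path (not part of the library;
// 'g_Allocator' is a placeholder).
#if 0
VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufCreateInfo.size = 65536;
bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VkBuffer buffer = VK_NULL_HANDLE;
VmaAllocation allocation = VK_NULL_HANDLE;
VkResult res = vmaCreateBuffer(g_Allocator, &bufCreateInfo, &allocCreateInfo,
    &buffer, &allocation, VMA_NULL);
// ... use the buffer ...
vmaDestroyBuffer(g_Allocator, buffer, allocation);
#endif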
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
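// Illustrative sketch (not part of the library; 'g_Allocator' is a placeholder).
#if 0
VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
imgCreateInfo.extent = { 1024, 1024, 1 };
imgCreateInfo.mipLevels = 1;
imgCreateInfo.arrayLayers = 1;
imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VkImage image = VK_NULL_HANDLE;
VmaAllocation allocation = VK_NULL_HANDLE;
VkResult res = vmaCreateImage(g_Allocator, &imgCreateInfo, &allocCreateInfo,
    &image, &allocation, VMA_NULL);
// ... use the image ...
vmaDestroyImage(g_Allocator, image, allocation);
#endif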
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION