#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H
#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif
#if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
    extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
    extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
    extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    extern PFN_vkAllocateMemory vkAllocateMemory;
    extern PFN_vkFreeMemory vkFreeMemory;
    extern PFN_vkMapMemory vkMapMemory;
    extern PFN_vkUnmapMemory vkUnmapMemory;
    extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
    extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
    extern PFN_vkBindBufferMemory vkBindBufferMemory;
    extern PFN_vkBindImageMemory vkBindImageMemory;
    extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    extern PFN_vkCreateBuffer vkCreateBuffer;
    extern PFN_vkDestroyBuffer vkDestroyBuffer;
    extern PFN_vkCreateImage vkCreateImage;
    extern PFN_vkDestroyImage vkDestroyImage;
    extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
    #if VMA_VULKAN_VERSION >= 1001000
        extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
        extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
        extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
        extern PFN_vkBindImageMemory2 vkBindImageMemory2;
        extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
    #endif // #if VMA_VULKAN_VERSION >= 1001000
#endif // #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
#include <vulkan/vulkan.h>
// Define this macro to declare maximum supported Vulkan version in format AAABBBCCC,
// e.g. 1002000 = Vulkan 1.2. By default it is detected from the included Vulkan headers.
#if !defined(VMA_VULKAN_VERSION)
    #if defined(VK_VERSION_1_2)
        #define VMA_VULKAN_VERSION 1002000
    #elif defined(VK_VERSION_1_1)
        #define VMA_VULKAN_VERSION 1001000
    #else
        #define VMA_VULKAN_VERSION 1000000
    #endif
#endif
#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif

// Defined to 1 when VK_KHR_buffer_device_address device extension or equivalent core Vulkan 1.2 feature is defined in its headers.
#if !defined(VMA_BUFFER_DEVICE_ADDRESS)
    #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
        #define VMA_BUFFER_DEVICE_ADDRESS 1
    #else
        #define VMA_BUFFER_DEVICE_ADDRESS 0
    #endif
#endif
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif

// Define this macro to decorate pointers with an attribute specifying the
// length of the array they point to if they are not null.
#ifndef VMA_LEN_IF_NOT_NULL
    #define VMA_LEN_IF_NOT_NULL(len)
#endif
// The VMA_NULLABLE macro is defined to be _Nullable when compiling with Clang.
// https://clang.llvm.org/docs/AttributeReference.html#nullable
#ifndef VMA_NULLABLE
    #ifdef __clang__
        #define VMA_NULLABLE _Nullable
    #else
        #define VMA_NULLABLE
    #endif
#endif

// The VMA_NOT_NULL macro is defined to be _Nonnull when compiling with Clang.
#ifndef VMA_NOT_NULL
    #ifdef __clang__
        #define VMA_NOT_NULL _Nonnull
    #else
        #define VMA_NOT_NULL
    #endif
#endif
// If non-dispatchable handles are represented as pointers then we can give
// then nullability annotations.
#ifndef VMA_NOT_NULL_NON_DISPATCHABLE
    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
        #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
    #else
        #define VMA_NOT_NULL_NON_DISPATCHABLE
    #endif
#endif

#ifndef VMA_NULLABLE_NON_DISPATCHABLE
    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
        #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
    #else
        #define VMA_NULLABLE_NON_DISPATCHABLE
    #endif
#endif
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryType,
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
    VkDeviceSize size,
    void* VMA_NULLABLE pUserData);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryType,
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
    VkDeviceSize size,
    void* VMA_NULLABLE pUserData);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pHeapSizeLimit;
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t frameIndex);
/// Define this macro to 0 to disable functions vmaBuildStatsString and vmaFreeStatsString.
#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as a null-terminated string in JSON format.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator VMA_NOT_NULL allocator,
    char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator VMA_NOT_NULL allocator,
    char* VMA_NULLABLE pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
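/*
Example usage (a hedged sketch, not part of the library; assumes `allocator` is
a valid VmaAllocator and error handling is omitted):

\code
VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufCreateInfo.size = 65536;
bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

uint32_t memTypeIndex;
VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
    allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);
// memTypeIndex can now be used e.g. as VmaPoolCreateInfo::memoryTypeIndex.
\endcode
*/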
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator VMA_NOT_NULL allocator,
    const VmaPoolCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    size_t* VMA_NULLABLE pLostAllocationCount);

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    const char* VMA_NULLABLE * VMA_NOT_NULL ppName);

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    const char* VMA_NULLABLE pName);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
    const VmaAllocationCreateInfo* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pCreateInfo,
    size_t allocationCount,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
    VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator VMA_NOT_NULL allocator,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator VMA_NOT_NULL allocator,
    size_t allocationCount,
    const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);

/// Deprecated.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize newSize);

VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    void* VMA_NULLABLE pUserData);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    void* VMA_NULLABLE * VMA_NOT_NULL ppData);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize offset,
    VkDeviceSize size);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize offset,
    VkDeviceSize size);
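/*
Typical map/write/flush sequence for a host-visible allocation (a hedged sketch,
not part of the library; assumes `allocator` and `alloc` are valid and error
handling is omitted):

\code
void* pData;
vmaMapMemory(allocator, alloc, &pData);
memcpy(pData, mySrcData, mySrcDataSize);
vmaUnmapMemory(allocator, alloc);
// Needed only if the memory type is not HOST_COHERENT:
vmaFlushAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
\endcode
*/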
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t allocationCount,
    const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t allocationCount,
    const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
    const VmaPool VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(poolCount) pPools;

    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory;
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator VMA_NOT_NULL allocator,
    const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
    size_t allocationCount,
    VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
    const VmaDefragmentationInfo* VMA_NULLABLE pDefragmentationInfo,
    VmaDefragmentationStats* VMA_NULLABLE pDefragmentationStats);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
    const void* VMA_NULLABLE pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
    const void* VMA_NULLABLE pNext);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
    VmaAllocation VMA_NULLABLE allocation);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator VMA_NOT_NULL allocator,
    VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
    VmaAllocation VMA_NULLABLE allocation);
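/*
Canonical buffer creation through VMA (a hedged sketch, not part of the library;
`allocator` is assumed to be a valid VmaAllocator, error handling omitted):

\code
VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufCreateInfo.size = 65536;
bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VkBuffer buf;
VmaAllocation alloc;
vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);

// ... use buf ...

vmaDestroyBuffer(allocator, buf, alloc); // Destroys the buffer and frees its memory.
\endcode
*/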
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdlib>
#include <cstring>
#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

#if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
    #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
#endif
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio 2015 Update 2 and later provide std::shared_mutex even in
    // C++14 mode, but report __cplusplus as 199711L; detect them via _MSVC_LANG.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif
#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <cassert>   // for assert
#include <algorithm> // for min, max
#include <mutex>

#define VMA_NULL   nullptr
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
    {
        return pointer;
    }
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef NDEBUG
        #define VMA_ASSERT(expr)
    #else
        #define VMA_ASSERT(expr) assert(expr)
    #endif
#endif

// Assert that will be called very often, so it should be disabled by default.
#ifndef VMA_HEAVY_ASSERT
    #ifdef NDEBUG
        #define VMA_HEAVY_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #endif
#endif
#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif
// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
        bool TryLock() { return m_Mutex.try_lock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif
// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            bool TryLockRead() { return m_Mutex.try_lock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
            bool TryLockWrite() { return m_Mutex.try_lock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI. Minimum supported client = Windows Vista.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
            bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            bool TryLockRead() { return m_Mutex.TryLock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
            bool TryLockWrite() { return m_Mutex.TryLock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif
#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    /// Every allocation will have its own memory block.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    /// Minimum alignment of all allocations, in bytes.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    /// Minimum margin before and after every allocation, in bytes.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    /// Define this macro to 1 to automatically fill new allocations and destroyed allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    /// Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN
    /// to enable writing magic value to the margin before and after every allocation and validating it.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    /// Set this to 1 for debugging purposes only, to enable single mutex protecting all entry calls to the library.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    /// Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity. Set to more than 1 for debugging purposes only.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif
#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif
static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN, little-endian bytes 66 E6 84 7F.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

// Copy of some Vulkan definitions so we don't need to check their existence just to handle few constants.
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >>  2) & 0x33333333) + (c & 0x33333333);
    c = ((c >>  4) + c) & 0x0F0F0F0F;
    c = ((c >>  8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}

// Returns true if given number is a power of two.
// T must be unsigned integer number or signed integer but always nonnegative.
// For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x - 1)) == 0;
}
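/*
Worked example for the helpers above (illustration only, not part of the
original source):

\code
const uint32_t alignedUp   = VmaAlignUp<uint32_t>(11, 8);   // == 16
const uint32_t alignedDown = VmaAlignDown<uint32_t>(11, 8); // == 8
const bool pow2 = VmaIsPow2(12u); // == false (12 = 0b1100 has two bits set)
\endcode

Note VmaAlignUp/VmaAlignDown use division, so they are correct for any positive
align, not only powers of two; the bit-mask variants elsewhere in this file
require power-of-2 alignment.
*/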
// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}
#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
/*
Returns true if two memory blocks occupy overlapping pages.
ResourceA must be in less memory offset than ResourceB.

Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
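/*
Worked example (illustration only): with pageSize = 4096, resource A occupying
[0, 4000) ends on page 0 (3999 & ~4095 == 0), so the result depends on where B
starts:

\code
VmaBlocksOnSamePage(0, 4000, 4100, 4096); // false: B starts on page 1 (4100 & ~4095 == 4096).
VmaBlocksOnSamePage(0, 4000, 4000, 4096); // true:  B starts on page 0, so granularity padding matters.
\endcode
*/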
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one of them is
buffer or linear image and another one is optimal image. If type is unknown,
behave conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#else
    // no-op
#endif
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
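/*
How the corruption margin works (a sketch; active only when both
VMA_DEBUG_MARGIN > 0 and VMA_DEBUG_DETECT_CORRUPTION are enabled): every
allocation is surrounded by VMA_DEBUG_MARGIN bytes filled with
VMA_CORRUPTION_DETECTION_MAGIC_VALUE, conceptually:

    [margin][allocation 1][margin][allocation 2][margin]...

VmaWriteMagicValue fills a margin when an allocation is created; a later
corruption check calls VmaValidateMagicValue on each margin and reports failure
if a buffer overrun has clobbered the magic values.
*/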
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; // Example size.
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
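/*
Usage sketch for the RAII guards above (illustration only; `m_Mutex` is assumed
to be a VMA_RW_MUTEX member and `m_UseMutex` a bool, matching how the allocator
classes in this file use them):

\code
void ReadPath()
{
    VmaMutexLockRead lock(m_Mutex, m_UseMutex);  // shared lock
    // ... read shared state; unlocked automatically at end of scope ...
}
void WritePath()
{
    VmaMutexLockWrite lock(m_Mutex, m_UseMutex); // exclusive lock
    // ... mutate shared state ...
}
\endcode
*/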
#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection or place where
new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}

template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
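/*
Usage sketch (illustration only): given a sorted array, find the first element
not less than a key - i.e. the position where the key could be inserted while
keeping the array sorted:

\code
VkDeviceSize sizes[] = { 16, 64, 256, 1024 };
struct Less { bool operator()(VkDeviceSize a, VkDeviceSize b) const { return a < b; } };
const VkDeviceSize* it = VmaBinaryFindFirstNotLess(sizes, sizes + 4, (VkDeviceSize)100, Less());
// it points to 256.
\endcode
*/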
/*
Returns true if all pointers in the array are not-null and unique.
Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
T must be pointer type, e.g. VmaAllocation, VmaPool.
*/
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
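/*
Usage sketch (illustration only): prepending an extension struct to a Vulkan
pNext chain:

\code
VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
VkMemoryDedicatedAllocateInfoKHR dedicatedInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
VmaPnextChainPushFront(&allocInfo, &dedicatedInfo);
// allocInfo.pNext == &dedicatedInfo; dedicatedInfo.pNext == previous head (null here).
\endcode
*/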
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    void* result = VMA_NULL;
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        result = (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
    VMA_ASSERT(result != VMA_NULL && "CPU memory allocation failed.");
    return result;
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}

static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
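/*
Usage sketch (illustration only): VmaStlAllocator routes all element memory
through VkAllocationCallbacks, so internal containers honor the callbacks the
user passed at allocator creation. `callbacks` below stands for whatever
VkAllocationCallbacks pointer is in scope (may be null):

\code
const VkAllocationCallbacks* callbacks = VMA_NULL;
VmaVector< uint32_t, VmaStlAllocator<uint32_t> > v(VmaStlAllocator<uint32_t>(callbacks));
v.push_back(42u);
\endcode
*/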
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
    // value is unused.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};

template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
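/*
Usage sketch (illustration only): keeping a VmaVector sorted while inserting
and removing, using the binary search above. `vec` stands for an existing
VmaVector kept sorted ascending:

\code
struct Less { bool operator()(uint32_t a, uint32_t b) const { return a < b; } };
VmaVectorInsertSorted<Less>(vec, 42u); // O(log n) search + O(n) element shift.
VmaVectorRemoveSorted<Less>(vec, 42u); // Returns false if 42 is absent.
\endcode
*/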
/* Class with interface similar to subset of std::vector and std::list.
It can use the static array (optimized for small number of elements) or
the dynamic array, depending on the current size. */
template<typename T, typename AllocatorT, size_t N>
class VmaSmallVector
{
public:
    typedef T value_type;

    VmaSmallVector(const AllocatorT& allocator) :
        m_Count(0),
        m_DynamicArray(allocator)
    {
    }
    VmaSmallVector(size_t count, const AllocatorT& allocator) :
        m_Count(count),
        m_DynamicArray(count > N ? count : 0, allocator)
    {
    }
    template<typename SrcT, typename SrcAllocatorT, size_t SrcN>
    VmaSmallVector(const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) = delete;
    template<typename SrcT, typename SrcAllocatorT, size_t SrcN>
    VmaSmallVector<T, AllocatorT, N>& operator=(const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) = delete;

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
    const T* data() const { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return data()[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return data()[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[m_Count - 1];
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        if(newCount > N && m_Count > N)
        {
            // Any direction, staying in m_DynamicArray
            m_DynamicArray.resize(newCount, freeMemory);
        }
        else if(newCount > N && m_Count <= N)
        {
            // Growing, moving from m_StaticArray to m_DynamicArray
            m_DynamicArray.resize(newCount, freeMemory);
            if(m_Count > 0)
            {
                memcpy(m_DynamicArray.data(), m_StaticArray, m_Count * sizeof(T));
            }
        }
        else if(newCount <= N && m_Count > N)
        {
            // Shrinking, moving from m_DynamicArray to m_StaticArray
            if(newCount > 0)
            {
                memcpy(m_StaticArray, m_DynamicArray.data(), newCount * sizeof(T));
            }
            m_DynamicArray.resize(0, freeMemory);
        }
        else
        {
            // Any direction, staying in m_StaticArray - nothing to do here
        }
        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        m_DynamicArray.clear(freeMemory);
        m_Count = 0;
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        T* const dataPtr = data();
        if(index < oldCount)
        {
            memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) * sizeof(T));
        }
        dataPtr[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            T* const dataPtr = data();
            memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        data()[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return data(); }
    iterator end() { return data() + m_Count; }

private:
    size_t m_Count;
    T m_StaticArray[N]; // Used when m_Count <= N.
    VmaVector<T, AllocatorT> m_DynamicArray; // Used when m_Count > N.
};
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    template<typename... Types> T* Alloc(Types... args);
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
            return result;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
    return result;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            ptr->~T(); // Explicit destructor call.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
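/*
Usage sketch (illustration only): VmaPoolAllocator is a free-list pool - blocks
grow geometrically (factor 3/2) and freed slots are reused in LIFO order.
`callbacks` stands for whatever VkAllocationCallbacks pointer is in scope:

\code
VmaPoolAllocator<VmaSuballocation> pool(callbacks, 32); // first block: 32 items
VmaSuballocation* s = pool.Alloc(); // placement-new inside one of the blocks
pool.Free(s);                       // slot returns to its block's free list
\endcode
*/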
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}
template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector. */
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
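/*
Design note (an observation about the code above, not authoritative): this
fallback VmaMap is a sorted VmaVector rather than a hash table. For the small
numbers of entries the library stores, binary search over contiguous memory
avoids the per-node allocation overhead of std::unordered_map; find() is
O(log n) while insert()/erase() are O(n) due to element shifting.
*/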
class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    // This struct is allocated using VmaPoolAllocator.
    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment{1},
        m_Size{0},
        m_pUserData{VMA_NULL},
        m_LastUseFrameIndex{currentFrameIndex},
        m_MemoryTypeIndex{0},
        m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
        m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
        m_MapCount{0},
        m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
    {
#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.

    If hAllocation is already lost, assert - you should not call it then.
    If hAllocation was not created with CAN_BECOME_LOST_BIT, assert.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        // Allocation out of VmaDeviceMemoryBlock.
        BlockAllocation m_BlockAllocation;
        // Allocation for an object that has its own private VkDeviceMemory.
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif
};
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as
allocated memory block or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparator for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

/*
Parameters of planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    // Shouldn't modify blockCount.
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags.
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request must already be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};

#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    // For defragmentation.
    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free. Merges it with adjacent free
    // suballocations if applicable. Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of m_FreeSuballocationsBySize if it's suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of m_FreeSuballocationsBySize if it's suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
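/*
Illustrative note, not part of the library: m_Suballocations is a list of
contiguous [offset, offset+size) ranges covering the whole block, where no two
free ranges are adjacent (they get merged on free), m_FreeCount counts the free
ranges, and m_SumFreeSize is the sum of their sizes; Validate() below re-derives
all three and compares. A hypothetical invariant check over a plain array:

    struct Range { VkDeviceSize offset, size; bool free; };
    bool CheckInvariants(const Range* r, size_t n, VkDeviceSize blockSize)
    {
        VkDeviceSize expectedOffset = 0;
        for(size_t i = 0; i < n; ++i)
        {
            if(r[i].offset != expectedOffset) return false;       // ranges are contiguous
            if(i > 0 && r[i].free && r[i - 1].free) return false; // free neighbors are merged
            expectedOffset += r[i].size;
        }
        return expectedOffset == blockSize;                       // ranges cover the block
    }
*/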
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    // There are two suballocation vectors, used in ping-pong way.
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        // Suballocations in 2nd vector are created later than the ones in 1st, but they all have smaller offset.
        SECOND_VECTOR_RING_BUFFER,
        // Suballocations in 2nd vector are upper side of double stack.
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
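/*
Illustrative note, not part of the library: the linear metadata keeps two
suballocation vectors and interprets them according to m_2ndVectorMode, roughly
(offsets grow to the right):

    SECOND_VECTOR_EMPTY:        |--- 1st --->              |
    SECOND_VECTOR_RING_BUFFER:  |-- 2nd -->   |--- 1st --->|   2nd wraps around below 1st
    SECOND_VECTOR_DOUBLE_STACK: |--- 1st --->   <--- 2nd --|   2nd grows down from the top

A hypothetical double-stack offset computation, ignoring alignment and margins:

    VkDeviceSize AllocLower(VkDeviceSize& lowerTop, VkDeviceSize size) { VkDeviceSize o = lowerTop; lowerTop += size; return o; }
    VkDeviceSize AllocUpper(VkDeviceSize& upperBottom, VkDeviceSize size) { upperBottom -= size; return upperBottom; }

AccessSuballocations1st()/2nd() swap the two physical vectors via m_1stVectorIndex,
so the ring buffer can be rotated without moving elements.
*/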
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    struct {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    // Sum of sizes of free nodes. Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // Alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level. node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level. node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
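/*
Illustrative sketch, not part of the library: with LevelToNodeSize(level) ==
m_UsableSize >> level, a buddy block of usable size 256 MiB has node sizes
256 MiB at level 0, 128 MiB at level 1, 64 MiB at level 2, and so on.
AllocSizeToLevel conceptually picks the deepest level whose node still fits
the request (ExampleAllocSizeToLevel is an invented standalone equivalent):

    uint32_t ExampleAllocSizeToLevel(VkDeviceSize usableSize, uint32_t levelCount, VkDeviceSize allocSize)
    {
        uint32_t level = 0;
        VkDeviceSize nodeSize = usableSize;
        // Descend while the child node (half the size) still fits the request.
        while(level + 1 < levelCount && (nodeSize >> 1) >= allocSize)
        {
            nodeSize >>= 1;
            ++level;
        }
        return level;
    }
*/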
/*
Represents a single block of device memory (VkDeviceMemory) with all the
data about its regions (aka suballocations, VmaAllocation), assigned and free.
Thread-safety: this class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if it doesn't belong to a custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    // Protects access to m_hMemory, m_MapCount, m_pMappedData.
    // Changes in m_pMetadata are protected by the parent VmaBlockVector::m_Mutex.
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
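/*
Illustrative sketch, not part of the library: Map()/Unmap() are reference
counted, so nested mappings of the same block are cheap; only the first Map()
actually maps the memory, and only the matching last Unmap() unmaps it.
A hypothetical caller, assuming a valid block and allocator:

    void* pData = VMA_NULL;
    if(block.Map(hAllocator, 1, &pData) == VK_SUCCESS)
    {
        // ... read or write through pData ...
        block.Unmap(hAllocator, 1); // decrements the map count
    }
*/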
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaDeviceMemoryBlock* pSrcBlock;
    VmaDeviceMemoryBlock* pDstBlock;
};

class VmaDefragmentationAlgorithm;
/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a
specific Vulkan memory type. Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    // Saves results in pCtx->res.
    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        uint32_t flags,
        VmaDefragmentationStats* pStats);

    uint32_t ProcessDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationPassMoveInfo* pMoves, uint32_t maxMoves);

    void CommitDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationStats* pStats);

    ////////////////////////////
    // To be used only while the m_Mutex is locked. Used during defragmentation.

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    // There can be at most one block that is completely empty - a hysteresis to avoid
    // pessimistic case of alternating creation and destruction of a VkDeviceMemory.
    bool m_HasEmptyBlock;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    void UpdateHasEmptyBlock();
};
struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo,
        VkDeviceSize preferredBlockSize);
    ~VmaPool_T();

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
    // ...
#endif

private:
    uint32_t m_Id;
    char* m_Name;
};
/*
Performs defragmentation:

- Updates `pBlockVector->m_pMetadata`.
- Updates allocations by calling ChangeBlockAllocation() or ChangeOffset().
- Does not move actual data, only returns requested moves as `moves`.
*/
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        bool freeOldAllocations);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
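/*
Illustrative sketch, not part of the library: the algorithm records planned
copies as VmaDefragmentationMove entries without touching the data itself.
A hypothetical driver loop (the variable names and setup are invented):

    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >
        moves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks()));
    VkResult res = algo.Defragment(moves, maxBytesToMove, maxAllocationsToMove, flags);
    // On success, moves[] lists (srcBlockIndex, srcOffset) -> (dstBlockIndex, dstOffset)
    // copies for the caller to execute on CPU (memcpy) or GPU (vkCmdCopyBuffer).
*/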
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                // Leave this structure for remaining empty space.
                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                // This structure becomes invalid.
                else
                {
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }

                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
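/*
Illustrative sketch, not part of the library: FreeSpaceDatabase caches up to
MAX_COUNT (4) destination gaps produced while compacting. For example, after

    db.Register(0, 256, 192);     // block 0, gap at offset 256, 192 bytes

a subsequent db.Fetch(64, 100, outBlock, outOffset) returns outOffset == 256
(already 64-aligned) in block 0; the remaining tail is 92 bytes, and assuming
the threshold VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER is below that, the
entry is shrunk in place to the tail (offset 356, size 92) rather than
invalidated, exactly as coded above.
*/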
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    uint32_t flags;
    VkBuffer hBuffer;
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VkResult res;
    bool mutexLocked;
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
    uint32_t defragmentationMovesProcessed;
    uint32_t defragmentationMovesCommitted;
    bool hasDefragmentationPlan;

    VmaBlockVectorDefragmentationContext(
        VmaAllocator hAllocator,
        VmaPool hCustomPool, // Optional.
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

private:
    const VmaAllocator m_hAllocator;
    // Null if not from custom pool.
    const VmaPool m_hCustomPool;
    // Redundant, for convenience not to fetch from m_hCustomPool->m_BlockVector or m_hAllocator->m_pBlockVectors.
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        VmaAllocator hAllocator,
        uint32_t currFrameIndex,
        uint32_t flags,
        VmaDefragmentationStats* pStats);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, const VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        const VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags);

    VkResult DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo);
    VkResult DefragmentPassEnd();

private:
    const VmaAllocator m_hAllocator;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    VmaDefragmentationStats* const m_pStats;

    VkDeviceSize m_MaxCpuBytesToMove;
    uint32_t m_MaxCpuAllocationsToMove;
    VkDeviceSize m_MaxGpuBytesToMove;
    uint32_t m_MaxGpuAllocationsToMove;

    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    VmaRecorder();
    VkResult Init(const VmaRecordSettings& settings, bool useMutex);
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled,
        bool deviceCoherentMemoryExtensionEnabled);
    ~VmaRecorder();

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationContext ctx);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        VmaDefragmentationContext ctx);
    void RecordSetPoolName(uint32_t frameIndex,
        VmaPool pool,
        const char* name);

private:
    struct CallParams
    {
        uint32_t threadId;
        double time;
    };

    class UserDataString
    {
    public:
        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
        const char* GetString() const { return m_Str; }

    private:
        char m_Buf[32];
        const char* m_Str;
    };

    bool m_UseMutex;
    VmaRecordFlags m_Flags;
    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
};

#endif // #if VMA_RECORDING_ENABLED
// Thread-safe wrapper over VmaPoolAllocator free list, for allocation of VmaAllocation_T objects.
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    template<typename... Types> VmaAllocation Allocate(Types... args);
    void Free(VmaAllocation hAlloc);

private:
    VMA_MUTEX m_Mutex;
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
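/*
Illustrative sketch, not part of the library: m_AllocationBytes tracks bytes
handed out to allocations while m_BlockBytes tracks bytes of VkDeviceMemory
blocks actually obtained from the driver, per heap. A hypothetical estimate of
current heap usage between budget fetches, assuming VMA_MEMORY_BUDGET is
enabled (EstimateHeapUsage is an invented helper, in the spirit of what the
budget code does):

    uint64_t EstimateHeapUsage(const VmaCurrentBudgetData& b, uint32_t heapIndex)
    {
        // Usage reported by Vulkan at the last fetch, corrected by block bytes
        // allocated or freed since then; guard against unsigned underflow.
        const uint64_t blockBytes = b.m_BlockBytes[heapIndex];
        const uint64_t atFetch = b.m_BlockBytesAtBudgetFetch[heapIndex];
        if(blockBytes >= atFetch)
            return b.m_VulkanUsage[heapIndex] + (blockBytes - atFetch);
        const uint64_t freed = atFetch - blockBytes;
        return b.m_VulkanUsage[heapIndex] > freed ? b.m_VulkanUsage[heapIndex] - freed : 0;
    }
*/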
// Main allocator object.
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0).
    bool m_UseKhrBindMemory2; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0).
    bool m_UseExtMemoryBudget;
    bool m_UseAmdDeviceCoherentMemory;
    bool m_UseKhrBufferDeviceAddress;
    VkDevice m_hDevice;
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap, so cannot allocate more than the heap size.
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo);
    ~VmaAllocator_T();

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

    uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationStats* pStats,
        VmaDefragmentationContext* pContext);
    VkResult DefragmentationEnd(VmaDefragmentationContext context);

    VkResult DefragmentationPassBegin(
        VmaDefragmentationPassInfo* pInfo,
        VmaDefragmentationContext context);
    VkResult DefragmentationPassEnd(VmaDefragmentationContext context);

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkBuffer buffer,
        const void* pNext);
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkImage image,
        const void* pNext);

    VkResult BindBufferMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

    VkResult FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);
    VkResult FlushOrInvalidateAllocations(
        uint32_t allocationCount,
        const VmaAllocation* allocations,
        const VkDeviceSize* offsets, const VkDeviceSize* sizes,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    // Returns bit mask of memory types that can support defragmentation on GPU as
    // they support creation of required buffer for copy operations.
    uint32_t GetGpuDefragmentationMemoryTypeBits();

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

    // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types.
    uint32_t m_GlobalMemoryTypeBits;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    void ImportVulkanFunctions_Static();
#endif

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    void ImportVulkanFunctions_Dynamic();
#endif

    void ValidateVulkanFunctions();

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool withinBudget,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

    uint32_t CalculateGlobalMemoryTypeBits() const;

    bool GetFlushOrInvalidateRange(
        VmaAllocation allocation,
        VkDeviceSize offset, VkDeviceSize size,
        VkMappedMemoryRange& outRange) const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
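/*
Illustrative sketch, not part of the library: these helpers pair like
malloc/free with explicit destructor calls, routed through the allocator's
VkAllocationCallbacks. A hypothetical use, assuming a valid VmaAllocator:

    int* pArr = VmaAllocateArray<int>(hAllocator, 16); // raw aligned storage
    // ... use pArr[0..15] ...
    vma_delete_array(hAllocator, pArr, 16);            // runs destructors, then frees
*/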
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char *p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char *p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
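/*
Illustrative sketch, not part of the library: the writer emits JSON into a
VmaStringBuilder; object members are written as alternating key/value calls,
which is what the valueCount % 2 checks in BeginValue() enforce. For example,
assuming a valid VmaAllocator handle:

    VmaStringBuilder sb(hAllocator);
    {
        VmaJsonWriter json(hAllocator->GetAllocationCallbacks(), sb);
        json.BeginObject();
        json.WriteString("Name");     // key
        json.WriteString("Example");  // value
        json.WriteString("Count");    // key
        json.WriteNumber(42u);        // value
        json.EndObject();
    }
    // sb.GetData() now holds {"Name": "Example", "Count": 42} with indentation.
*/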
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
            ++mapRefCount;
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting LastUseFrameIndex to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                return true;
            }
        }
    }
}

#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}

void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
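/*
Illustrative sketch, not part of the library: the second operator() overload
lets the comparator be used directly against a raw size in lower_bound-style
searches over the size-sorted m_FreeSuballocationsBySize array, as
CreateAllocationRequest does below via VmaBinaryFindFirstNotLess. A
hypothetical standalone equivalent using std::lower_bound:

    #include <algorithm>

    VmaSuballocationList::iterator* FindFirstFittingFreeRange(
        VmaSuballocationList::iterator* beg,
        VmaSuballocationList::iterator* end,
        VkDeviceSize minSize)
    {
        // Returns the first registered free range whose size is >= minSize.
        return std::lower_bound(beg, end, minSize, VmaSuballocationItemSizeLess());
    }
*/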
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
8816 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
8817 VmaBlockMetadata(hAllocator),
8820 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8821 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
8825 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
8829 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
8831 VmaBlockMetadata::Init(size);
8834 m_SumFreeSize = size;
8836 VmaSuballocation suballoc = {};
8837 suballoc.offset = 0;
8838 suballoc.size = size;
8839 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8840 suballoc.hAllocation = VK_NULL_HANDLE;
8842 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8843 m_Suballocations.push_back(suballoc);
8844 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
8846 m_FreeSuballocationsBySize.push_back(suballocItem);
8849 bool VmaBlockMetadata_Generic::Validate()
const
8851 VMA_VALIDATE(!m_Suballocations.empty());
8854 VkDeviceSize calculatedOffset = 0;
8856 uint32_t calculatedFreeCount = 0;
8858 VkDeviceSize calculatedSumFreeSize = 0;
8861 size_t freeSuballocationsToRegister = 0;
8863 bool prevFree =
false;
8865 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
8866 suballocItem != m_Suballocations.cend();
8869 const VmaSuballocation& subAlloc = *suballocItem;
8872 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
8874 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
8876 VMA_VALIDATE(!prevFree || !currFree);
8878 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
8882 calculatedSumFreeSize += subAlloc.size;
8883 ++calculatedFreeCount;
8884 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8886 ++freeSuballocationsToRegister;
8890 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
8894 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
8895 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
8898 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
8901 calculatedOffset += subAlloc.size;
8902 prevFree = currFree;
8907 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
8909 VkDeviceSize lastSize = 0;
8910 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
8912 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
8915 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8917 VMA_VALIDATE(suballocItem->size >= lastSize);
8919 lastSize = suballocItem->size;
8923 VMA_VALIDATE(ValidateFreeSuballocationList());
8924 VMA_VALIDATE(calculatedOffset == GetSize());
8925 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
8926 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
8931 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const
8933 if(!m_FreeSuballocationsBySize.empty())
8935 return m_FreeSuballocationsBySize.back()->size;
8943 bool VmaBlockMetadata_Generic::IsEmpty()
const
8945 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
8948 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
8952 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
8964 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
8965 suballocItem != m_Suballocations.cend();
8968 const VmaSuballocation& suballoc = *suballocItem;
8969 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8982 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const
8984 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
8986 inoutStats.
size += GetSize();
8993 #if VMA_STATS_STRING_ENABLED
8995 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const
8997 PrintDetailedMap_Begin(json,
8999 m_Suballocations.size() - (
size_t)m_FreeCount,
9003 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9004 suballocItem != m_Suballocations.cend();
9005 ++suballocItem, ++i)
9007 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9009 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
9013 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
9017 PrintDetailedMap_End(json);
9020 #endif // #if VMA_STATS_STRING_ENABLED
9022 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
9023 uint32_t currentFrameIndex,
9024 uint32_t frameInUseCount,
9025 VkDeviceSize bufferImageGranularity,
9026 VkDeviceSize allocSize,
9027 VkDeviceSize allocAlignment,
9029 VmaSuballocationType allocType,
9030 bool canMakeOtherLost,
9032 VmaAllocationRequest* pAllocationRequest)
9034 VMA_ASSERT(allocSize > 0);
9035 VMA_ASSERT(!upperAddress);
9036 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9037 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9038 VMA_HEAVY_ASSERT(Validate());
9040 pAllocationRequest->type = VmaAllocationRequestType::Normal;
9043 if(canMakeOtherLost ==
false &&
9044 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
9050 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
9051 if(freeSuballocCount > 0)
9056 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
9057 m_FreeSuballocationsBySize.data(),
9058 m_FreeSuballocationsBySize.data() + freeSuballocCount,
9059 allocSize + 2 * VMA_DEBUG_MARGIN,
9060 VmaSuballocationItemSizeLess());
9061 size_t index = it - m_FreeSuballocationsBySize.data();
9062 for(; index < freeSuballocCount; ++index)
9067 bufferImageGranularity,
9071 m_FreeSuballocationsBySize[index],
9073 &pAllocationRequest->offset,
9074 &pAllocationRequest->itemsToMakeLostCount,
9075 &pAllocationRequest->sumFreeSize,
9076 &pAllocationRequest->sumItemSize))
9078 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
9083 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
9085 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9086 it != m_Suballocations.end();
9089 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
9092 bufferImageGranularity,
9098 &pAllocationRequest->offset,
9099 &pAllocationRequest->itemsToMakeLostCount,
9100 &pAllocationRequest->sumFreeSize,
9101 &pAllocationRequest->sumItemSize))
9103 pAllocationRequest->item = it;
9111 for(
size_t index = freeSuballocCount; index--; )
9116 bufferImageGranularity,
9120 m_FreeSuballocationsBySize[index],
9122 &pAllocationRequest->offset,
9123 &pAllocationRequest->itemsToMakeLostCount,
9124 &pAllocationRequest->sumFreeSize,
9125 &pAllocationRequest->sumItemSize))
9127 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
9134 if(canMakeOtherLost)
9139 VmaAllocationRequest tmpAllocRequest = {};
9140 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
9141 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
9142 suballocIt != m_Suballocations.end();
9145 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
9146 suballocIt->hAllocation->CanBecomeLost())
9151 bufferImageGranularity,
9157 &tmpAllocRequest.offset,
9158 &tmpAllocRequest.itemsToMakeLostCount,
9159 &tmpAllocRequest.sumFreeSize,
9160 &tmpAllocRequest.sumItemSize))
9164 *pAllocationRequest = tmpAllocRequest;
9165 pAllocationRequest->item = suballocIt;
9168 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
9170 *pAllocationRequest = tmpAllocRequest;
9171 pAllocationRequest->item = suballocIt;
9184 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
9185 uint32_t currentFrameIndex,
9186 uint32_t frameInUseCount,
9187 VmaAllocationRequest* pAllocationRequest)
9189 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
9191 while(pAllocationRequest->itemsToMakeLostCount > 0)
9193 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
9195 ++pAllocationRequest->item;
9197 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9198 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
9199 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
9200 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9202 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
9203 --pAllocationRequest->itemsToMakeLostCount;
9211 VMA_HEAVY_ASSERT(Validate());
9212 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9213 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end(); ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
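// When VMA_DEBUG_MARGIN > 0 and corruption detection is enabled, a magic value is
// written into the margin before and after every allocation. CheckCorruption
// re-reads those margins from the mapped block data; an overwritten magic value
// means a neighboring allocation wrote outside its range.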
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end(); ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }
    return VK_SUCCESS;
}
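// Alloc converts the chosen free suballocation into a used one. Because the final
// offset may be aligned past the start of the free range, and the range may be
// larger than needed, the leftover space is re-inserted as separate free
// suballocations before (paddingBegin) and after (paddingEnd) the new item.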
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert a new free suballocation after this one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert a new free suballocation before this one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end(); ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end(); ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
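// CheckAllocation is the core fitting test: given a candidate free (or losable)
// suballocation, it computes the final offset after debug margin, alignment and
// bufferImageGranularity adjustments, and reports through the out-parameters how
// much free space the request would consume (*pSumFreeSize), how many bytes of
// live allocations would have to be made lost (*pSumItemSize), and how many items
// that is (*itemsToMakeLostCount). Callers compare candidates by this cost.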
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;
    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that we have final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. *pOffset is already filled.
    return true;
}
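// Freeing keeps the suballocation list normalized: a freed item is merged with a
// free neighbor on either side, so no two free suballocations are ever adjacent.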
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}
VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
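// m_FreeSuballocationsBySize holds iterators to free suballocations sorted by
// size, which is what makes the binary searches above and below possible. Ranges
// smaller than VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately left
// out: they are too small to satisfy typical requests and would only bloat the vector.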
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }
}
void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty())
    {
        return false;
    }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend(); ++it)
    {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
        {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
            {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}
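// VmaBlockMetadata_Linear keeps suballocations in two sorted vectors instead of a
// list. In outline: new allocations are appended to the 1st vector; once the user
// frees from the beginning and allocates again, the 2nd vector starts either as a
// ring buffer (wrapping around before the 1st) or as a double stack (growing down
// from the end of the block), as tracked by m_2ndVectorMode. Freed slots are only
// marked null (hAllocation == VK_NULL_HANDLE) and counted in m_*NullItems*
// members, then reclaimed lazily in CleanupAfterFree().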
VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_SumFreeSize(0),
    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_1stVectorIndex(0),
    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
    m_1stNullItemsBeginCount(0),
    m_1stNullItemsMiddleCount(0),
    m_2ndNullItemsCount(0)
{
}

VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
{
}

void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}
bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty())
    {
        // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty())
    {
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i)
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree)
        {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else
        {
            ++nullItem1stCount;
        }

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; )
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}
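// Gaps left by freed allocations inside the vectors are not reusable by the
// linear algorithm, so the maximum unused range only considers the space before
// the 1st vector, after it, or between the two vectors, depending on the mode.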
VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();

    if(IsEmpty())
    {
        return size;
    }

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode)
    {
    case SECOND_VECTOR_EMPTY:
        // Available space is after end of 1st, as well as before beginning of 1st (which
        // would make it a ring buffer).
        {
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }

    case SECOND_VECTOR_RING_BUFFER:
        // Available space is only between end of 2nd and beginning of 1st.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }

    case SECOND_VECTOR_DOUBLE_STACK:
        // Available space is only between end of 1st and top of 2nd.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }

    default:
        VMA_ASSERT(0);
        return 0;
    }
}
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    outInfo.blockCount = 1;
    outInfo.allocationCount = (uint32_t)GetAllocationCount();
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.unusedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else // We are at the end of 2nd vector.
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }

            outInfo.usedBytes += suballoc.size;
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else // We are at the end of 1st vector.
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex past the beginning.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else // We are before the beginning of 2nd vector.
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    outInfo.unusedBytes = size - outInfo.usedBytes;
}
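// AddPoolStats performs the same three-pass traversal as CalcAllocationStatInfo
// above (2nd vector as ring buffer, then 1st vector, then 2nd vector as upper
// stack), but accumulates into pool-wide VmaPoolStats instead of per-block info.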
void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0; // Start from the beginning of the 2nd vector.
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                ++inoutStats.allocationCount;

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else // We are at the end of 2nd vector.
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            ++inoutStats.allocationCount;

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else // We are at the end of 1st vector.
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex past the beginning.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                ++inoutStats.allocationCount;

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else // We are before the beginning of 2nd vector.
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: Calculate overall statistics.

    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;

    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }

                // 2. Process this allocation.
                ++alloc2ndCount;
                usedBytes += suballoc.size;

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else // We are at the end of 2nd vector.
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    ++unusedRangeCount;
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                ++unusedRangeCount;
            }

            ++alloc1stCount;
            usedBytes += suballoc.size;

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else // We are at the end of 1st vector.
        {
            if(lastOffset < size)
            {
                ++unusedRangeCount;
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }

                ++alloc2ndCount;
                usedBytes += suballoc.size;

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else // We are before the beginning of 2nd vector.
            {
                if(lastOffset < size)
                {
                    ++unusedRangeCount;
                }
                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);

    // SECOND PASS: Write the actual map.
    lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                // 2. Process this allocation.
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else // We are at the end of 2nd vector.
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }

            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else // We are at the end of 1st vector.
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else // We are before the beginning of 2nd vector.
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED
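// Allocation requests are dispatched by direction: upper-address requests go to
// the top of the double stack, everything else is tried at the end of the 1st
// vector first and then at the end of the 2nd (ring-buffer wrap-around).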
bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());
    return upperAddress ?
        CreateAllocationRequest_UpperAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
        CreateAllocationRequest_LowerAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}
bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
        return false;
    }

    // Try to allocate before 2nd.back(), or end of block if 2nd.empty().
    if(allocSize > size)
    {
        return false;
    }
    VkDeviceSize resultBaseOffset = size - allocSize;
    if(!suballocations2nd.empty())
    {
        const VmaSuballocation& lastSuballoc = suballocations2nd.back();
        resultBaseOffset = lastSuballoc.offset - allocSize;
        if(allocSize > lastSuballoc.offset)
        {
            return false;
        }
    }

    // Start from offset equal to end of free space.
    VkDeviceSize resultOffset = resultBaseOffset;

    // Apply VMA_DEBUG_MARGIN at the end.
    if(VMA_DEBUG_MARGIN > 0)
    {
        if(resultOffset < VMA_DEBUG_MARGIN)
        {
            return false;
        }
        resultOffset -= VMA_DEBUG_MARGIN;
    }

    // Apply alignment.
    resultOffset = VmaAlignDown(resultOffset, allocAlignment);

    // Check next suballocations from 2nd for BufferImageGranularity conflicts.
    // Decrease offset to a page boundary if necessary.
    if(bufferImageGranularity > 1 && !suballocations2nd.empty())
    {
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
        {
            const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
            if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
            {
                if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
                {
                    bufferImageGranularityConflict = true;
                    break;
                }
            }
            else
            {
                // Already on previous page.
                break;
            }
        }
        if(bufferImageGranularityConflict)
        {
            resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
        }
    }

    // There is enough free space.
    const VkDeviceSize endOf1st = !suballocations1st.empty() ?
        suballocations1st.back().offset + suballocations1st.back().size :
        0;
    if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
    {
        // Check previous suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
            }
        }

        // All tests passed: Success.
        pAllocationRequest->offset = resultOffset;
        pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
        pAllocationRequest->sumItemSize = 0;
        // pAllocationRequest->item unused.
        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
        return true;
    }

    return false;
}
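// The lower-address path tries two placements. First the end of the 1st vector
// (bounded by the bottom of the 2nd stack, if any); if that fails and the 2nd
// vector is empty or already a ring buffer, it wraps around and tries the end of
// the 2nd vector, which must stay strictly below the first live item of the 1st.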
bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        // Try to allocate at the end of 1st vector.

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations1st.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
            {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
                {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on previous page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            // pAllocationRequest->item, customData unused.
            pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }
    // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
    // beginning of 1st vector.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost)
        {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
            {
                // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    // No problem.
                }
                else
                {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else
                    {
                        return false;
                    }
                }
                ++index1st;
            }

            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, we must mark more allocations lost or fail.
            if(bufferImageGranularity > 1)
            {
                while(index1st < suballocations1st.size())
                {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
                    {
                        if(suballoc.hAllocation != VK_NULL_HANDLE)
                        {
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                            {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else
                            {
                                return false;
                            }
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                    ++index1st;
                }
            }

            // Special case: There is not enough room at the end for this allocation, even after making all from the 1st lost.
            if(index1st == suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
            {
                VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
                return false;
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1)
            {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++)
                {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
            // pAllocationRequest->item, customData unused.
            return true;
        }
    }

    return false;
}
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0)
    {
        return true;
    }

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    // We always start from 1st.
    SuballocationVectorType* suballocations = &AccessSuballocations1st();
    size_t index = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
    {
        if(index == suballocations->size())
        {
            index = 0;
            // If we get to the end of 1st, we wrap around to beginning of 2nd.
            if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                suballocations = &AccessSuballocations2nd();
            }
            // else: m_2ndVectorMode == SECOND_VECTOR_EMPTY:
            // suballocations continues pointing at AccessSuballocations1st().
            VMA_ASSERT(!suballocations->empty());
        }
        VmaSuballocation& suballoc = (*suballocations)[index];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                if(suballocations == &AccessSuballocations1st())
                {
                    ++m_1stNullItemsMiddleCount;
                }
                else
                {
                    ++m_2ndNullItemsCount;
                }
                ++madeLostCount;
            }
            else
            {
                return false;
            }
        }
        ++index;
    }

    CleanupAfterFree();

    return true;
}
uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount)
    {
        CleanupAfterFree();
    }

    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type)
    {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();

            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());

            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector.
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode)
            {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}
void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty())
    {
        // First allocation: Mark it as next empty at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset)
        {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
    {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaBinaryFindSorted(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc,
            VmaSuballocationOffsetLess());
        if(it != suballocations1st.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
    {
        // Item from the middle of 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
        if(it != suballocations2nd.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
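// The compaction heuristic above reads: with more than 32 items, compact when
// nullItemCount * 2 >= nonNullItemCount * 3, i.e. when null (freed) slots make up
// at least 1.5x the live items - for example, 96 nulls against 64 live
// allocations. CleanupAfterFree below is where that compaction actually happens.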
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty())
    {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else
    {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
        {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            VmaVectorRemove(suballocations2nd, 0);
        }

        if(ShouldCompact1st())
        {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
            {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
                {
                    ++srcIndex;
                }
                if(dstIndex != srcIndex)
                {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty())
        {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
        {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
                {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}
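// VmaBlockMetadata_Buddy implements a classic buddy allocator: the usable part of
// the block is a power of two, and each level L splits it into 2^L equal nodes,
// so LevelToNodeSize(L) halves at every level. A node is either FREE, SPLIT into
// two buddies, or an ALLOCATION. Per-level free lists (m_FreeList) give constant-
// time access to a free node of a given size; allocation sizes are rounded up to
// the node size of the target level, trading some internal fragmentation for
// speed and trivial merging of freed buddies.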
VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}

VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}

void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
    {
        ++m_LevelCount;
    }

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}
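// Example: for a block of 10 MiB, VmaPrevPow2 yields m_UsableSize = 8 MiB (the
// largest power of two not exceeding the size); the remaining 2 MiB tail can
// never be allocated from and is reported separately via GetUnusableSize().
// Level node sizes then run 8 MiB, 4 MiB, 2 MiB, ... down to the smallest level
// whose node size is still at least MIN_NODE_SIZE.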
bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next)
        {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL)
            {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else
            {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists at higher levels are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}
VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        if(m_FreeList[level].front != VMA_NULL)
        {
            return LevelToNodeSize(level);
        }
    }
    return 0;
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    outInfo.blockCount = 1;
    outInfo.allocationCount = 0;
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.unusedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0)
    {
        // Report the unusable tail of the block as one more unused range.
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
    }
}
void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0)
    {
        ++inoutStats.unusedRangeCount;
        // Not updating unusedRangeSizeMax with unusableSize because this space is not available for allocations.
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
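// The request search below starts at the deepest level whose node size still fits
// allocSize (targetLevel) and walks up toward the root: the first level with a
// free node satisfying the alignment wins, which keeps the chosen node as small
// as possible. The found level is passed along to Alloc via customData.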
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity: whenever the allocation might
    // be an OPTIMAL image, round both alignment and size up to the granularity.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
    {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    // Lost allocations are not supported in the buddy allocator at the moment.
    return pAllocationRequest->itemsToMakeLostCount == 0;
}

uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    // Lost allocations are not supported in the buddy allocator at the moment.
    return 0;
}
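// Alloc re-locates the free node chosen in CreateAllocationRequest (by offset, at
// the level stored in customData) and then splits it repeatedly: each iteration
// removes the node from its free list, creates two half-size buddy children, and
// descends into the left child until the target level is reached.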
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down, splitting free nodes.
    while(currLevel < targetLevel)
    {
        // currNode is free and taken from m_FreeList[currLevel], so remove it.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
        /*
        We can be sure that currNode, as left child of node previously split,
        is also free.
        */
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}

void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    // Recursively delete the whole subtree.
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }

    vma_delete(GetAllocationCallbacks(), node);
}
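
/*
ValidateNode() recursively checks structural invariants of the tree: parent and
buddy pointers are consistent, a SPLIT node has two children of half its size
at the correct offsets, and the validation context accumulates free/allocation
counts and free bytes that the caller compares against the cached counters.
*/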
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev, next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}

uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    // Find the deepest level whose node size still fits allocSize.
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
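
/*
FreeAtOffset() descends from the root, choosing the left or right child at each
SPLIT node depending on the offset, until it reaches the ALLOCATION node. After
marking it FREE it walks back up, merging the node with its buddy (and deleting
both children) as long as the buddy is also free, then pushes the final node on
the free list of the level it ended at.
*/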
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find node and level.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT)
    {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize)
        {
            node = node->split.leftChild;
        }
        else
        {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free nodes (buddies) if possible.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
    {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}

void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += levelNodeSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            ++outInfo.allocationCount;
            outInfo.usedBytes += allocSize;
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);

            // The node may be larger than the allocation - count the tail as unused.
            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    // List is empty.
    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else
    {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}

void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}

#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED
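
/*
VmaDeviceMemoryBlock represents one VkDeviceMemory object together with the
metadata object (generic, linear, or buddy) that manages suballocations within
it. It also owns the reference-counted mapping of that memory and the mutex
that serializes vkMapMemory / vkBind*Memory calls on the same VkDeviceMemory.
*/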
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}

void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}

void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // This is the most important assert in the entire library.
    // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}

bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}

VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
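
/*
Map()/Unmap() implement reference-counted mapping: the first Map() call maps
the whole VkDeviceMemory with vkMapMemory and caches the pointer; subsequent
calls only increase m_MapCount and return the cached pointer. Unmap()
decrements the counter and calls vkUnmapMemory only when it drops to zero.
*/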
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}

void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}

VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}

VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    VmaAllocator hAllocator,
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}

VkResult VmaDeviceMemoryBlock::BindImageMemory(
    VmaAllocator hAllocator,
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}

static void VmaInitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
{
    inoutInfo.blockCount += srcInfo.blockCount;
    inoutInfo.allocationCount += srcInfo.allocationCount;
    inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
    inoutInfo.usedBytes += srcInfo.usedBytes;
    inoutInfo.unusedBytes += srcInfo.unusedBytes;
    inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
    inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
    inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}

VmaPool_T::~VmaPool_T()
{
}

void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}

#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}

VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}

void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}

bool VmaBlockVector::IsEmpty()
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    return m_Blocks.empty();
}

bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_Algorithm == 0 || m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}

static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
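
/*
Allocation within a block vector tries, roughly in this order: the last block
(linear algorithm), existing blocks scanned forward (best fit, blocks sorted by
free size ascending) or backward (worst/first fit), then creating a new block,
and finally - if allowed - making existing allocations lost. With
NEW_BLOCK_SIZE_SHIFT_MAX = 3, a new block may be cut down to as little as 1/8
of the preferred block size when no explicit block size was requested.
*/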
VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // If linearAlgorithm is used, canMakeOtherLost is available only when used as ring buffer,
    // which in turn is available only when maxBlockCount = 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
        }

        // 2. Try to create new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }
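
    /*
    The "make other lost" path below repeats up to VMA_ALLOCATION_TRY_COUNT
    times: each pass picks the block whose allocation request has the lowest
    cost (fewest bytes of other allocations that would have to be sacrificed),
    then tries to actually make those allocations lost. Another thread may
    touch the allocations between the two steps, hence the retry loop and
    VK_ERROR_TOO_MANY_OBJECTS after too many failed attempts.
    */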
    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // Allocate from this block.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    UpdateHasEmptyBlock();
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment,
                        size,
                        m_MemoryTypeIndex,
                        suballocType,
                        mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations must have been touched while we are here. Next try.
            }
            else
            {
                // Could not find a place in any of the blocks - break outer loop.
                break;
            }
        }
        // Maximum number of tries exceeded - a very unlikely situation.
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
void VmaBlockVector::Free(VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already had an empty block (or budget exceeded) - delete this one.
            // Keeping one empty block is a hysteresis that avoids allocating a whole block back and forth.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
        }
        // pBlock didn't become empty, but we have another empty block - find and free that one.
        // (This is optional, heuristics.)
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
            }
        }

        UpdateHasEmptyBlock();
        IncrementallySortBlocks();
    }

    // Destruction of a free block. Deferred until this point, outside of mutex lock, for performance reasons.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}

void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}

void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap - keeps m_Blocks approximately sorted by sum free size, ascending.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
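
/*
AllocateFromBlock() is the common path for a single block: ask the block's
metadata for an allocation request (without making anything lost), map the
block if a persistently mapped allocation was requested, commit the request,
and fill in the returned VmaAllocation.
*/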
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex,
        m_FrameInUseCount,
        m_BufferImageGranularity,
        size,
        alignment,
        isUpperAddress,
        suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // Allocate from pBlock.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        UpdateHasEmptyBlock();
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment,
            size,
            m_MemoryTypeIndex,
            suballocType,
            mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;

#if VMA_BUFFER_DEVICE_ADDRESS
    // Every standalone block can potentially contain a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT - always enable the feature.
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_hAllocator->m_UseKhrBufferDeviceAddress)
    {
        allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
        VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created - create a new block for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
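
/*
CPU defragmentation applies the computed moves with memmove: every block that
is a source or destination of at least one move is mapped (or its existing
persistent mapping reused), non-coherent source ranges are invalidated before
the copy and destination ranges flushed after it, then the mappings created
just for defragmentation are released.
*/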
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
            memmove(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
    // Regardless of pDefragCtx->res.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
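
/*
GPU defragmentation records the same moves into a command buffer instead: a
temporary VkBuffer spanning the whole block is created and bound to every
block involved in a move, the moves are recorded as vkCmdCopyBuffer regions,
and the context result is left as VK_NOT_READY until the user submits the
command buffer and completion is signaled through the defragmentation context.
*/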
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind buffer for whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Save buffers to defrag context for later destruction.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}
void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                break;
            }
        }
    }
    UpdateHasEmptyBlock();
}

void VmaBlockVector::UpdateHasEmptyBlock()
{
    m_HasEmptyBlock = false;
    for(size_t index = 0, count = m_Blocks.size(); index < count && !m_HasEmptyBlock; ++index)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = true;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
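
/*
Defragment() decides whether this block vector is processed on the CPU
(memmove between mapped blocks) or on the GPU (vkCmdCopyBuffer into the given
command buffer). When both are possible, GPU is preferred for DEVICE_LOCAL
memory or on integrated GPUs; corruption detection disables the GPU path
because the magic margins could not be rewritten there.
*/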
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: Heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
            {
                if(!m_Mutex.TryLockWrite())
                {
                    pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
                    return;
                }
            }
            else
            {
                m_Mutex.LockWrite();
                pCtx->mutexLocked = true;
            }
        }

        pCtx->Begin(overlappingMoveSupported, flags);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
        {
            if(m_hAllocator->m_UseMutex)
                m_Mutex.UnlockWrite();

            if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
                pCtx->res = VK_NOT_READY;

            return;
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
            }
        }
    }
}
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    uint32_t flags,
    VmaDefragmentationStats* pStats)
{
    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL && m_hAllocator->m_UseMutex)
    {
        VMA_ASSERT(pCtx->mutexLocked == false);

        // Incremental defragmentation doesn't hold the lock, so when we enter here
        // we don't actually have any lock protecting us. Since we mutate state here,
        // we have to take the lock out now.
        m_Mutex.LockWrite();
        pCtx->mutexLocked = true;
    }

    // If the mutex isn't locked we didn't do any work and there is nothing to delete.
    if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
    {
        // Destroy buffers.
        for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
        {
            VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
            if(blockCtx.hBuffer)
            {
                (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            FreeEmptyBlocks(pStats);
        }
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
uint32_t VmaBlockVector::ProcessDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    const uint32_t moveCount = std::min(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);

    for(uint32_t i = 0; i < moveCount; ++i)
    {
        VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];

        pMove->allocation = move.hAllocation;
        pMove->memory = move.pDstBlock->GetDeviceMemory();
        pMove->offset = move.dstOffset;

        ++pMove;
    }

    pCtx->defragmentationMovesProcessed += moveCount;

    return moveCount;
}

void VmaBlockVector::CommitDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationStats* pStats)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++i)
    {
        const VmaDefragmentationMove& move = pCtx->defragmentationMoves[i];

        move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
        move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
    }

    pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
    FreeEmptyBlocks(pStats);
}
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}

bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}

VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
////////////////////////////////////////////////////////////////////////////////
// VmaDefragmentationAlgorithm_Generic members definition

VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}

VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
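
/*
DefragmentRound() moves allocations from the back of the block list (blocks
sorted from most "destination" to most "source") toward the front: for each
source allocation, scanned from the last block backward, it tries every block
with an index no greater than the source's as a destination and performs the
move when it "makes sense" (strictly lower block index, or lower offset within
the same block), until the byte or allocation limits are reached.
*/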
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    bool freeOldAllocations)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    size_t srcBlockMinIndex = 0;

    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT, // strategy
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move = {};
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                move.hAllocation = allocInfo.m_hAllocation;
                move.pSrcBlock = pSrcBlockInfo->m_pBlock;
                move.pDstBlock = pDstBlockInfo->m_pBlock;

                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);

                if(freeOldAllocations)
                {
                    pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
                    allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
                }

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pSrcBlockInfo->m_Allocations for next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}

VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        // This is a choice based on research.
        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks this time by the main criterium, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // This is a choice based on research.
    const uint32_t roundCount = 2;

    // Execute defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove, !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL));
    }

    return result;
}

bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
////////////////////////////////////////////////////////////////////////////////
// VmaDefragmentationAlgorithm_Fast members definition

VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}

VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
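
/*
The fast algorithm compacts each block vector in a single linear sweep: blocks
are sorted by free space, then allocations are visited in offset order and
packed toward a moving destination offset. When an allocation doesn't fit in
the remaining space of the current destination block, the leftover range is
registered in FreeSpaceDatabase so later, smaller allocations can still fill
it. Within one block, an overlapping move is only performed when it is
supported and shifts the allocation far enough (roughly more than 1/64 of its
size) to be worth the copy.
*/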
14077 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
14078 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14079 VkDeviceSize maxBytesToMove,
14080 uint32_t maxAllocationsToMove,
14083 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
14085 const size_t blockCount = m_pBlockVector->GetBlockCount();
14086 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
14091 PreprocessMetadata();
14095 m_BlockInfos.resize(blockCount);
14096 for(
size_t i = 0; i < blockCount; ++i)
14098 m_BlockInfos[i].origBlockIndex = i;
14101 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
14102 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
14103 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
14108 FreeSpaceDatabase freeSpaceDb;
14110 size_t dstBlockInfoIndex = 0;
14111 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14112 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14113 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14114 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
14115 VkDeviceSize dstOffset = 0;
14118 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
14120 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
14121 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
14122 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
14123 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
14124 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
14126 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
14127 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
14128 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
14129 if(m_AllocationsMoved == maxAllocationsToMove ||
14130 m_BytesMoved + srcAllocSize > maxBytesToMove)
14135 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
14137 VmaDefragmentationMove move = {};
14139 size_t freeSpaceInfoIndex;
14140 VkDeviceSize dstAllocOffset;
14141 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
14142 freeSpaceInfoIndex, dstAllocOffset))
14144 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
14145 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
14146 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
14149 if(freeSpaceInfoIndex == srcBlockInfoIndex)
14151 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14155 VmaSuballocation suballoc = *srcSuballocIt;
14156 suballoc.offset = dstAllocOffset;
14157 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
14158 m_BytesMoved += srcAllocSize;
14159 ++m_AllocationsMoved;
14161 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14163 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14164 srcSuballocIt = nextSuballocIt;
14166 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14168 move.srcBlockIndex = srcOrigBlockIndex;
14169 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14170 move.srcOffset = srcAllocOffset;
14171 move.dstOffset = dstAllocOffset;
14172 move.size = srcAllocSize;
14174 moves.push_back(move);
14181 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
14183 VmaSuballocation suballoc = *srcSuballocIt;
14184 suballoc.offset = dstAllocOffset;
14185 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
14186 m_BytesMoved += srcAllocSize;
14187 ++m_AllocationsMoved;
14189 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14191 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14192 srcSuballocIt = nextSuballocIt;
14194 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14196 move.srcBlockIndex = srcOrigBlockIndex;
14197 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14198 move.srcOffset = srcAllocOffset;
14199 move.dstOffset = dstAllocOffset;
14200 move.size = srcAllocSize;
14202 moves.push_back(move);
14207 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
14210 while(dstBlockInfoIndex < srcBlockInfoIndex &&
14211 dstAllocOffset + srcAllocSize > dstBlockSize)
14214 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
14216 ++dstBlockInfoIndex;
14217 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14218 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14219 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14220 dstBlockSize = pDstMetadata->GetSize();
14222 dstAllocOffset = 0;
14226 if(dstBlockInfoIndex == srcBlockInfoIndex)
14228 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14230 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
14232 bool skipOver = overlap;
14233 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
14237 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
14242 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
14244 dstOffset = srcAllocOffset + srcAllocSize;
14250 srcSuballocIt->offset = dstAllocOffset;
14251 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
14252 dstOffset = dstAllocOffset + srcAllocSize;
14253 m_BytesMoved += srcAllocSize;
14254 ++m_AllocationsMoved;
14257 move.srcBlockIndex = srcOrigBlockIndex;
14258 move.dstBlockIndex = dstOrigBlockIndex;
14259 move.srcOffset = srcAllocOffset;
14260 move.dstOffset = dstAllocOffset;
14261 move.size = srcAllocSize;
14263 moves.push_back(move);
            // Different block
            else
            {
                // MOVE OPTION 2: Move the allocation to a different block.

                VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                VmaSuballocation suballoc = *srcSuballocIt;
                suballoc.offset = dstAllocOffset;
                suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                dstOffset = dstAllocOffset + srcAllocSize;
                m_BytesMoved += srcAllocSize;
                ++m_AllocationsMoved;

                VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                ++nextSuballocIt;
                pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                srcSuballocIt = nextSuballocIt;

                pDstMetadata->m_Suballocations.push_back(suballoc);

                VmaDefragmentationMove move = {};
                move.srcBlockIndex = srcOrigBlockIndex;
                move.dstBlockIndex = dstOrigBlockIndex;
                move.srcOffset = srcAllocOffset;
                move.dstOffset = dstAllocOffset;
                move.size = srcAllocSize;

                moves.push_back(move);
            }
        }
    }
    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
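/*
PreprocessMetadata/PostprocessMetadata below temporarily remove all
VMA_SUBALLOCATION_TYPE_FREE entries, so the compaction loop above iterates
only over real allocations. Postprocess then re-creates the free
suballocations from the final offsets and re-derives m_FreeCount,
m_SumFreeSize, and m_FreeSuballocationsBySize for every block.
*/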
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            // pMetadata->m_SumFreeSize is already set to blockSize.
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
    // Find the first suballocation with offset >= suballoc.offset and insert before it.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end())
    {
        if(it->offset < suballoc.offset)
        {
            ++it;
        }
        else
        {
            break;
        }
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
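////////////////////////////////////////////////////////////////////////////////
// VmaBlockVectorDefragmentationContext

/*
One such context binds a single VmaBlockVector (a default memory type or a
custom pool) to the defragmentation algorithm chosen for it in Begin().
The counters defragmentationMovesProcessed / defragmentationMovesCommitted and
the hasDefragmentationPlan flag drive the incremental, pass-based path used by
DefragmentPassBegin / DefragmentPassEnd further below.
*/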
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
    defragmentationMovesProcessed(0),
    defragmentationMovesCommitted(0),
    hasDefragmentationPlan(0),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}
VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}
void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    /********************************
    HERE IS THE CHOICE OF DEFRAGMENTATION ALGORITHM.
    ********************************/

    /*
    Fast algorithm is supported only when certain criteria are met:
    - VMA_DEBUG_MARGIN is 0.
    - All allocations in this block vector are moveable.
    - There is no possibility of image/buffer granularity conflict.
    - The defragmentation is not incremental.
    */
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
        !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL))
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}
VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, const VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with algorithm other than default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    const VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentators. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // DedicatedAlloc cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // Lost allocation cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with algorithm other than default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
    {
        // For incremental defragmentation, we just earmark how much we can move.
        // The real meat is in the defragmentation steps.
        m_MaxCpuBytesToMove = maxCpuBytesToMove;
        m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;

        m_MaxGpuBytesToMove = maxGpuBytesToMove;
        m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;

        if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
            m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
        {
            return VK_NOT_READY;
        }
    }
    else
    {
        if(commandBuffer == VK_NULL_HANDLE)
        {
            maxGpuBytesToMove = 0;
            maxGpuAllocationsToMove = 0;
        }
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats, flags,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats, flags,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}
VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo)
{
    VmaDefragmentationPassMoveInfo* pCurrentMove = pInfo->pMoves;
    uint32_t movesLeft = pInfo->moveCount;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                pBlockVectorCtx->GetBlockVector()->Defragment(
                    pBlockVectorCtx,
                    m_pStats, m_Flags,
                    m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                    m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                    VK_NULL_HANDLE);

                if(pBlockVectorCtx->res < VK_SUCCESS)
                    continue;

                pBlockVectorCtx->hasDefragmentationPlan = true;
            }

            const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
                pBlockVectorCtx,
                pCurrentMove, movesLeft);

            movesLeft -= processed;
            pCurrentMove += processed;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                m_pStats, m_Flags,
                m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                VK_NULL_HANDLE);

            if(pBlockVectorCtx->res < VK_SUCCESS)
                continue;

            pBlockVectorCtx->hasDefragmentationPlan = true;
        }

        const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
            pBlockVectorCtx,
            pCurrentMove, movesLeft);

        movesLeft -= processed;
        pCurrentMove += processed;
    }

    pInfo->moveCount = pInfo->moveCount - movesLeft;

    return VK_SUCCESS;
}
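// The plan is built lazily: the first pass runs VmaBlockVector::Defragment()
// with VK_NULL_HANDLE as command buffer just to compute the moves, sets
// hasDefragmentationPlan, and from then on ProcessDefragmentations() hands out
// at most pInfo->moveCount moves per pass.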
VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
{
    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                res = VK_NOT_READY;
                continue;
            }

            pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
                pBlockVectorCtx, m_pStats);

            if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
                res = VK_NOT_READY;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            res = VK_NOT_READY;
            continue;
        }

        pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
            pBlockVectorCtx, m_pStats);

        if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
            res = VK_NOT_READY;
    }

    return res;
}
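/*
A rough sketch of how user code drives these passes through the public API
(the exact return-code protocol is simplified here; `allocator`, `ctx`,
`moves` and `passInfo` are illustrative local variables, not part of this
file - see the documentation of vmaBeginDefragmentationPass for details):

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_INCREMENTAL;
    // ... fill pAllocations / pPools and the maxCpuBytesToMove etc. limits ...
    VmaDefragmentationContext ctx;
    vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &ctx);
    for(;;)
    {
        VmaDefragmentationPassMoveInfo moves[64];
        VmaDefragmentationPassInfo passInfo = {};
        passInfo.moveCount = 64;
        passInfo.pMoves = moves;
        if(vmaBeginDefragmentationPass(allocator, ctx, &passInfo) == VK_SUCCESS)
            break; // Nothing left to move.
        // Copy data for passInfo.moveCount moves, then:
        vmaEndDefragmentationPass(allocator, ctx);
    }
    vmaDefragmentationEnd(allocator, ctx);
*/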
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_RecordingStartTime(std::chrono::high_resolution_clock::now())
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

#if defined(_WIN32)
    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
#else
    // Open file for writing.
    m_File = fopen(settings.pFilePath, "wb");
    if(m_File == 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
#endif

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,8");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}

void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, offset, size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, offset, size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    PrintPointerList(info.poolCount, (const VmaAllocation*)info.pPools);
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
    Flush();
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}
void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
    Flush();
}

VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            // If VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT is not specified,
            // record the pointer value itself as a string.
            snprintf(m_PtrStr, 17, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled,
    bool deviceCoherentMemoryExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
    Flush();
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
#if defined(_WIN32)
    outParams.threadId = GetCurrentThreadId();
#else
    // Use C++11 features to get thread id and convert it to uint32_t.
    // There is room for optimization since stringstream is quite slow.
    std::thread::id thread_id = std::this_thread::get_id();
    std::stringstream thread_id_to_string_converter;
    thread_id_to_string_converter << thread_id;
    std::string thread_id_as_string = thread_id_to_string_converter.str();
    outParams.threadId = static_cast<uint32_t>(std::stoi(thread_id_as_string.c_str()));
#endif

    auto current_time = std::chrono::high_resolution_clock::now();

    outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
}
void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}

void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
    m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
    m_UseAmdDeviceCoherentMemory((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT) != 0),
    m_UseKhrBufferDeviceAddress((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0),
    m_GlobalMemoryTypeBits(UINT32_MAX)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        // These extensions were promoted to Vulkan 1.1, so no need to enable them separately.
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Needs to be a multiple of uint32_t size because we are going to write
        // VMA_CORRUPTION_DETECTION_MAGIC_VALUE to it.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device && pCreateInfo->instance);

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if !(VMA_BUFFER_DEVICE_ADDRESS)
    if(m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
    }
#endif
#if VMA_VULKAN_VERSION < 1002000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
    m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0,
            SIZE_MAX,
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call m_pBlockVectors[memTypeIndex]->CreateMinBlocks here,
        // because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}
VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget,
            m_UseAmdDeviceCoherentMemory);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Static();
#endif

    if(pVulkanFunctions != VMA_NULL)
    {
        ImportVulkanFunctions_Custom(pVulkanFunctions);
    }

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Dynamic();
#endif

    ValidateVulkanFunctions();
}
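// Note on the import order implemented above: statically linked entry points
// are taken first (when VMA_STATIC_VULKAN_FUNCTIONS), then any user-provided
// pointers from VmaVulkanFunctions overwrite those members, and finally the
// dynamic path fetches whatever is still VMA_NULL via vkGetInstanceProcAddr /
// vkGetDeviceProcAddr (when VMA_DYNAMIC_VULKAN_FUNCTIONS). A partially filled
// VmaVulkanFunctions struct passed to vmaCreateAllocator is therefore fine.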
#if VMA_STATIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Static()
{
    // Vulkan 1.0
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;

    // Vulkan 1.1
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
        m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
        m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
    }
#endif
}

#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions)
{
    VMA_ASSERT(pVulkanFunctions != VMA_NULL);

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
    VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
    VMA_COPY_IF_NOT_NULL(vkFreeMemory);
    VMA_COPY_IF_NOT_NULL(vkMapMemory);
    VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
    VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
    VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
    VMA_COPY_IF_NOT_NULL(vkCreateImage);
    VMA_COPY_IF_NOT_NULL(vkDestroyImage);
    VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif

#if VMA_MEMORY_BUDGET
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif

#undef VMA_COPY_IF_NOT_NULL
}
#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
{
#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);

    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
    VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
    VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
    VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
    VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
    VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
    VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
    VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
    VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
    VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
    VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
    VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");

#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2");
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif

#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
    }
#endif

#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
    }
#endif // #if VMA_BIND_MEMORY2

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif // #if VMA_MEMORY_BUDGET

#undef VMA_FETCH_DEVICE_FUNC
#undef VMA_FETCH_INSTANCE_FUNC
}

#endif // #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
void VmaAllocator_T::ValidateVulkanFunctions()
{
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif

#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
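/*
Worked example, assuming the default macro values (VMA_SMALL_HEAP_MAX_SIZE =
1 GiB, VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE = 256 MiB): a 512 MiB heap counts as
small, so its preferred block size is 512 MiB / 8 = 64 MiB, while an 8 GiB
heap gets the preferred large-heap block size of 256 MiB. Either result is
then aligned up to 32 bytes.
*/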
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }
    // If memory is lazily allocated, it should be always dedicated.
    if(finalCreateInfo.usage == VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristics: Allocate dedicated memory if requested size is greater than half of preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        else
        {
            return AllocateDedicatedMemory(
                size,
                suballocType,
                memTypeIndex,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
                finalCreateInfo.pUserData,
                dedicatedBuffer,
                dedicatedBufferUsage,
                dedicatedImage,
                allocationCount,
                pAllocations);
        }
    }
    else
    {
        VkResult res = blockVector->Allocate(
            m_CurrentFrameIndex.load(),
            size,
            alignment,
            finalCreateInfo,
            suballocType,
            allocationCount,
            pAllocations);
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Block allocation failed - try dedicated memory as fallback.
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        else
        {
            res = AllocateDedicatedMemory(
                size,
                suballocType,
                memTypeIndex,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
                finalCreateInfo.pUserData,
                dedicatedBuffer,
                dedicatedBufferUsage,
                dedicatedImage,
                allocationCount,
                pAllocations);
            if(res == VK_SUCCESS)
            {
                // Succeeded: AllocateDedicatedMemory already filled pAllocations.
                VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
                return VK_SUCCESS;
            }
            else
            {
                // Everything failed: Return error code.
                VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
                return res;
            }
        }
    }
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

#if VMA_BUFFER_DEVICE_ADDRESS
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_UseKhrBufferDeviceAddress)
    {
        bool canContainBufferWithDeviceAddress = true;
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX || // Usage flags unknown
                (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = false;
        }
        if(canContainBufferWithDeviceAddress)
        {
            allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
            VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
        }
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size,
            suballocType,
            memTypeIndex,
            allocInfo,
            map,
            isUserDataString,
            pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register the allocations in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();

            // No vkUnmapMemory needed here: the Vulkan spec allows freeing
            // memory that is still mapped.

            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}

void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfoForPool,
            suballocType,
            allocationCount,
            pAllocations);
    }
    else
    {
        // Bit mask of memory Vulkan types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer,
                dedicatedBufferUsage,
                dedicatedImage,
                createInfo,
                memTypeIndex,
                suballocType,
                allocationCount,
                pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed. Try other compatible memory types.
            else
            {
                for(;;)
                {
                    // Remove old memTypeIndex from list of possibilities.
                    memoryTypeBits &= ~(1u << memTypeIndex);
                    // Find alternative memTypeIndex.
                    res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                    if(res == VK_SUCCESS)
                    {
                        alignmentForMemType = VMA_MAX(
                            vkMemReq.alignment,
                            GetMemoryTypeMinAlignment(memTypeIndex));

                        res = AllocateMemoryOfType(
                            vkMemReq.size,
                            alignmentForMemType,
                            requiresDedicatedAllocation || prefersDedicatedAllocation,
                            dedicatedBuffer,
                            dedicatedBufferUsage,
                            dedicatedImage,
                            createInfo,
                            memTypeIndex,
                            suballocType,
                            allocationCount,
                            pAllocations);
                        // Allocation from this alternative memory type succeeded.
                        if(res == VK_SUCCESS)
                        {
                            return res;
                        }
                        // else: Allocation from this memory type failed. Try next one - next loop iteration.
                    }
                    // No other matching memory type index could be found.
                    else
                    {
                        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                }
            }
        }
        else
        {
            return res;
        }
    }
}
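// Illustrative usage sketch (not part of the library): how an application can
// force the dedicated-allocation path validated above. `allocator` and `memReq`
// are placeholders assumed to be a valid VmaAllocator and a VkMemoryRequirements
// obtained from vkGetBufferMemoryRequirements:
//
//     VmaAllocationCreateInfo allocCreateInfo = {};
//     allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//     allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
//     VmaAllocation alloc;
//     VkResult res = vmaAllocateMemory(allocator, &memReq, &allocCreateInfo, &alloc, NULL);
//
// Combining VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT with
// VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT, or with a custom pool, fails the
// validation at the top of AllocateMemory, as asserted above.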
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}

VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and kept only for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}

void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
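// Illustrative usage sketch (not part of the library): gathering the statistics
// computed above from application code, assuming a valid `allocator`:
//
//     VmaStats stats;
//     vmaCalculateStats(allocator, &stats);
//     printf("Used: %llu bytes in %u allocations\n",
//         (unsigned long long)stats.total.usedBytes,
//         stats.total.allocationCount);
//
// This walks all default pools, custom pools, and dedicated allocations, so it
// is relatively expensive; prefer vmaGetBudget for per-frame queries.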
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // Have to take MIN with heap size because explicit HeapSizeLimit is included in it.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
        }
    }
}
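// Illustrative usage sketch (not part of the library): querying the budget
// computed above before a large allocation, assuming a valid `allocator`:
//
//     VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
//     vmaGetBudget(allocator, budgets);
//     if(budgets[heapIndex].usage + requiredBytes <= budgets[heapIndex].budget)
//     {
//         // Safe to allocate from this heap.
//     }
//
// `heapIndex` and `requiredBytes` are placeholders. When VK_EXT_memory_budget
// is unavailable, `budget` falls back to the 80%-of-heap-size heuristic above.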
static const uint32_t VMA_VENDOR_ID_AMD = 4098;

VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats, info.flags);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}

VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}

VkResult VmaAllocator_T::DefragmentationPassBegin(
    VmaDefragmentationPassInfo* pInfo,
    VmaDefragmentationContext context)
{
    return context->DefragmentPassBegin(pInfo);
}

VkResult VmaAllocator_T::DefragmentationPassEnd(
    VmaDefragmentationContext context)
{
    return context->DefragmentPassEnd();
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        /*
        Warning: This is a carefully designed algorithm.
        Do not modify unless you really know what you're doing :)
        */
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}

bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    // Memory type index out of range or forbidden.
    if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() ||
        ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}

void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}

void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}
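// Illustrative usage sketch (not part of the library): advancing the frame
// index once per rendered frame so lost-allocation tracking and the budget
// refresh above stay current. `allocator` is a placeholder:
//
//     static uint32_t frameIndex = 1; // Must never equal VMA_FRAME_INDEX_LOST.
//     vmaSetCurrentFrameIndex(allocator, frameIndex++);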
void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}

VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}

void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData);
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}

void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}

VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}
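// Illustrative usage sketch (not part of the library): passing a pNext chain
// into the bind-memory2 path above. Assumes Vulkan 1.1 / VK_KHR_bind_memory2
// and a device group; `allocator`, `alloc`, and `buf` are placeholders:
//
//     uint32_t deviceIndices[] = { 0 };
//     VkBindBufferMemoryDeviceGroupInfo deviceGroupInfo =
//         { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO };
//     deviceGroupInfo.deviceIndexCount = 1;
//     deviceGroupInfo.pDeviceIndices = deviceIndices;
//     VkResult res = vmaBindBufferMemory2(allocator, alloc, 0, buf, &deviceGroupInfo);
//
// With pNext == NULL, plain vmaBindBufferMemory is equivalent and does not
// require the extension.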
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
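// Illustrative usage sketch (not part of the library): the public map/unmap
// pair that ends up in Map()/Unmap() above. `allocator`, `alloc`, `srcData`,
// and `srcSize` are placeholders:
//
//     void* mapped = NULL;
//     if(vmaMapMemory(allocator, alloc, &mapped) == VK_SUCCESS)
//     {
//         memcpy(mapped, srcData, srcSize);
//         vmaUnmapMemory(allocator, alloc);
//     }
//
// Note the offset arithmetic above: for block allocations the returned pointer
// already points at this allocation's offset inside the shared VkDeviceMemory.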
VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    VkResult res = VK_SUCCESS;

    VkMappedMemoryRange memRange = {};
    if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
    {
        switch(op)
        {
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
    return res;
}

VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
    uint32_t allocationCount,
    const VmaAllocation* allocations,
    const VkDeviceSize* offsets, const VkDeviceSize* sizes,
    VMA_CACHE_OPERATION op)
{
    typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
    typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
    RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));

    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation alloc = allocations[allocIndex];
        const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
        const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
        VkMappedMemoryRange newRange;
        if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
        {
            ranges.push_back(newRange);
        }
    }

    VkResult res = VK_SUCCESS;
    if(!ranges.empty())
    {
        switch(op)
        {
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
            break;
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
    return res;
}
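// Illustrative usage sketch (not part of the library): flushing after a CPU
// write to a possibly non-coherent mapping, using the cache operation plumbing
// above. `allocator`, `alloc`, `mapped`, `srcData`, `srcSize` are placeholders:
//
//     memcpy(mapped, srcData, srcSize);
//     vmaFlushAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
//
// On HOST_COHERENT memory types GetFlushOrInvalidateRange() returns false and
// the call becomes a no-op, so it is safe to flush unconditionally.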
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    /*
    There is no need to call this, because Vulkan spec allows to skip vkUnmapMemory
    before vkFreeMemory.

    if(allocation->GetMappedData() != VMA_NULL)
    {
        (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
    }
    */

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}

uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create buffer.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // Query for supported memory types.
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // Destroy buffer.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}

uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
{
    // Make sure memory information is already fetched.
    VMA_ASSERT(GetMemoryTypeCount() > 0);

    uint32_t memoryTypeBits = UINT32_MAX;

    if(!m_UseAmdDeviceCoherentMemory)
    {
        // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD.
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
            {
                memoryTypeBits &= ~(1u << memTypeIndex);
            }
        }
    }

    return memoryTypeBits;
}

bool VmaAllocator_T::GetFlushOrInvalidateRange(
    VmaAllocation allocation,
    VkDeviceSize offset, VkDeviceSize size,
    VkMappedMemoryRange& outRange) const
{
    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        const VkDeviceSize allocationSize = allocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
        outRange.pNext = VMA_NULL;
        outRange.memory = allocation->GetMemory();

        switch(allocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                outRange.size = allocationSize - outRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                outRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
                    allocationSize - outRange.offset);
            }
            break;
        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            // 1. Still within this allocation.
            outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                size = allocationSize - offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
            }
            outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);

            // 2. Adjust to whole block.
            const VkDeviceSize allocationOffset = allocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
            outRange.offset += allocationOffset;
            outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
            break;
        }
        default:
            VMA_ASSERT(0);
        }
        return true;
    }
    return false;
}
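// Worked example of the rounding above (illustrative): with
// nonCoherentAtomSize = 64, offset = 100, size = 200:
//
//     outRange.offset = VmaAlignDown(100, 64)              = 64
//     outRange.size   = VmaAlignUp(200 + (100 - 64), 64)
//                     = VmaAlignUp(236, 64)                = 256
//
// The resulting range [64, 320) covers the requested bytes [100, 300) and has
// both ends aligned to the atom size, as VkMappedMemoryRange requires.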
#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    VmaPnextChainPushFront(&memProps, &budgetProps);

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();

            // Some bugged drivers return the budget incorrectly, e.g. 0 or much bigger than heap size.
            if(m_Budget.m_VulkanBudget[heapIndex] == 0)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
            }
            else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
            }
            if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
            {
                m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
            }
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET

void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}

uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_ASSERT(pCreateInfo->vulkanApiVersion == 0 ||
        (VK_VERSION_MAJOR(pCreateInfo->vulkanApiVersion) == 1 && VK_VERSION_MINOR(pCreateInfo->vulkanApiVersion) <= 2));
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo)
{
    VMA_ASSERT(allocator && pAllocatorInfo);
    pAllocatorInfo->instance = allocator->m_hInstance;
    pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice();
    pAllocatorInfo->device = allocator->m_hDevice;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();

            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
                    {
                        json.WriteString("PROTECTED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_UNCACHED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
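// Illustrative usage sketch (not part of the library): dumping the JSON built
// above for offline inspection. `allocator` is a placeholder:
//
//     char* statsString = NULL;
//     vmaBuildStatsString(allocator, &statsString, VK_TRUE);
//     // ... write statsString to a file or log ...
//     vmaFreeStatsString(allocator, statsString);
//
// The string must be released with vmaFreeStatsString because it was allocated
// through the allocator's own VkAllocationCallbacks.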
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    // Avoid DEVICE_COHERENT unless explicitly requested.
    if(((pAllocationCreateInfo->requiredFlags | pAllocationCreateInfo->preferredFlags) &
        (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
    {
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Calculate cost as number of bits from preferredFlags not present in this memory type.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
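// Illustrative usage sketch (not part of the library): letting the cost search
// above pick a memory type for an upload heap. `allocator` is a placeholder:
//
//     VmaAllocationCreateInfo allocCreateInfo = {};
//     allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
//     uint32_t memTypeIndex = UINT32_MAX;
//     VkResult res = vmaFindMemoryTypeIndex(
//         allocator, UINT32_MAX /* all types allowed */, &allocCreateInfo, &memTypeIndex);
//
// The chosen type satisfies all requiredFlags and misses the fewest
// preferredFlags bits; a cost of zero returns immediately.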
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool && ppName);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(
    VmaAllocator allocator,
    uint32_t allocationCount,
    const VmaAllocation* allocations,
    const VkDeviceSize* offsets,
    const VkDeviceSize* sizes)
{
    VMA_ASSERT(allocator);

    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocations);

    VMA_DEBUG_LOG("vmaFlushAllocations");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        //TODO
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(
    VmaAllocator allocator,
    uint32_t allocationCount,
    const VmaAllocation* allocations,
    const VkDeviceSize* offsets,
    const VkDeviceSize* sizes)
{
    VMA_ASSERT(allocator);

    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocations);

    VMA_DEBUG_LOG("vmaInvalidateAllocations");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        //TODO
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
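// Illustrative usage sketch (not part of the library): periodic corruption
// check across all memory types, assuming a valid `allocator`. Only effective
// when the library was compiled with a nonzero VMA_DEBUG_MARGIN and
// VMA_DEBUG_DETECT_CORRUPTION enabled:
//
//     VkResult res = vmaCheckCorruption(allocator, UINT32_MAX);
//     // VK_SUCCESS: something was checked and no corruption found.
//     // VK_ERROR_FEATURE_NOT_PRESENT: nothing could be checked.
//     // VK_ERROR_VALIDATION_FAILED_EXT: corruption detected.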
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    const VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo *pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using the begin/end pair below.
    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // info2.flags, GPU limits, and commandBuffer are deliberately left zero.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context,
    VmaDefragmentationPassInfo* pInfo)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(pInfo);

    VMA_DEBUG_LOG("vmaBeginDefragmentationPass");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        pInfo->moveCount = 0;
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassBegin(pInfo, context);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaEndDefragmentationPass");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
        return VK_SUCCESS;

    return allocator->DefragmentationPassEnd(context);
}
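// Illustrative usage sketch (not part of the library): CPU-side synchronous
// defragmentation with the begin/end pair above. `allocator`, `allocs`, and
// `allocCount` are placeholders; the allocations must not be in use by the GPU:
//
//     VmaDefragmentationInfo2 defragInfo = {};
//     defragInfo.allocationCount = (uint32_t)allocCount;
//     defragInfo.pAllocations = allocs;
//     defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
//     defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
//     VmaDefragmentationContext defragCtx;
//     vmaDefragmentationBegin(allocator, &defragInfo, NULL, &defragCtx);
//     vmaDefragmentationEnd(allocator, defragCtx);
//
// After this, buffers/images bound to moved allocations must be recreated and
// rebound; GPU-side defragmentation additionally requires a command buffer.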
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
        !allocator->m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            pBufferCreateInfo->usage, // dedicatedBufferUsage
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
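// Illustrative usage sketch (not part of the library): the common create /
// destroy pair for a GPU-only vertex buffer. `allocator` is a placeholder:
//
//     VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
//     bufCreateInfo.size = 65536;
//     bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
//     VmaAllocationCreateInfo allocCreateInfo = {};
//     allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//     VkBuffer buf;
//     VmaAllocation alloc;
//     vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, NULL);
//     // ... use buf ...
//     vmaDestroyBuffer(allocator, buf, alloc);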
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            UINT32_MAX, // dedicatedBufferUsage
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
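// Illustrative usage sketch (not part of the library): creating a sampled 2D
// texture through the path above. `allocator` is a placeholder:
//
//     VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
//     imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
//     imgCreateInfo.extent = { 1024, 1024, 1 };
//     imgCreateInfo.mipLevels = 1;
//     imgCreateInfo.arrayLayers = 1;
//     imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
//     imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
//     imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
//     imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
//     VmaAllocationCreateInfo allocCreateInfo = {};
//     allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//     VkImage img;
//     VmaAllocation alloc;
//     vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, NULL);
//     // ... use img ...
//     vmaDestroyImage(allocator, img, alloc);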
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION