#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H
#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif

#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif
#if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
extern PFN_vkAllocateMemory vkAllocateMemory;
extern PFN_vkFreeMemory vkFreeMemory;
extern PFN_vkMapMemory vkMapMemory;
extern PFN_vkUnmapMemory vkUnmapMemory;
extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
extern PFN_vkBindBufferMemory vkBindBufferMemory;
extern PFN_vkBindImageMemory vkBindImageMemory;
extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
extern PFN_vkCreateBuffer vkCreateBuffer;
extern PFN_vkDestroyBuffer vkDestroyBuffer;
extern PFN_vkCreateImage vkCreateImage;
extern PFN_vkDestroyImage vkDestroyImage;
extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
#if VMA_VULKAN_VERSION >= 1001000
extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
extern PFN_vkBindImageMemory2 vkBindImageMemory2;
extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
#endif // #if VMA_VULKAN_VERSION >= 1001000
#endif // #if defined(__ANDROID__) && VMA_STATIC_VULKAN_FUNCTIONS && VK_NO_PROTOTYPES
#include <vulkan/vulkan.h>

// Define this macro to declare maximum supported Vulkan version in format AAABBBCCC,
// where AAA = major, BBB = minor, CCC = patch.
// If you want to use version > 1.0, it also means you must use its headers.
#if !defined(VMA_VULKAN_VERSION)
    #if defined(VK_VERSION_1_2)
        #define VMA_VULKAN_VERSION 1002000
    #elif defined(VK_VERSION_1_1)
        #define VMA_VULKAN_VERSION 1001000
    #else
        #define VMA_VULKAN_VERSION 1000000
    #endif
#endif

#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif

// Defined to 1 when VK_KHR_buffer_device_address device extension or equivalent core Vulkan 1.2 feature is defined in its headers.
#if !defined(VMA_BUFFER_DEVICE_ADDRESS)
    #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
        #define VMA_BUFFER_DEVICE_ADDRESS 1
    #else
        #define VMA_BUFFER_DEVICE_ADDRESS 0
    #endif
#endif
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif
#ifndef VMA_LEN_IF_NOT_NULL
    #define VMA_LEN_IF_NOT_NULL(len)
#endif

#ifndef VMA_NULLABLE
    #ifdef __clang__
        #define VMA_NULLABLE _Nullable
    #else
        #define VMA_NULLABLE
    #endif
#endif

#ifndef VMA_NOT_NULL
    #ifdef __clang__
        #define VMA_NOT_NULL _Nonnull
    #else
        #define VMA_NOT_NULL
    #endif
#endif

#ifndef VMA_NOT_NULL_NON_DISPATCHABLE
    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
        #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
    #else
        #define VMA_NOT_NULL_NON_DISPATCHABLE
    #endif
#endif

#ifndef VMA_NULLABLE_NON_DISPATCHABLE
    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
        #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
    #else
        #define VMA_NULLABLE_NON_DISPATCHABLE
    #endif
#endif
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR* PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryType,
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
    VkDeviceSize size,
    void* VMA_NULLABLE pUserData);

/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR* PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryType,
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
    VkDeviceSize size,
    void* VMA_NULLABLE pUserData);
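
/*
Illustrative sketch (not part of the original source): these callback types let an
application observe every vkAllocateMemory/vkFreeMemory the library makes, e.g. for
logging. A user-side implementation could look like:

    static void VKAPI_PTR MyAllocateCallback(
        VmaAllocator allocator, uint32_t memoryType,
        VkDeviceMemory memory, VkDeviceSize size, void* pUserData)
    {
        printf("Allocated %llu bytes from memory type %u\n",
            (unsigned long long)size, memoryType);
    }
*/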
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pHeapSizeLimit;
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t frameIndex);
#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator VMA_NOT_NULL allocator,
    char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator VMA_NOT_NULL allocator,
    char* VMA_NULLABLE pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
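
/*
Usage sketch (illustrative, not from the original file): picking a memory type for a
CPU-side staging buffer via vmaFindMemoryTypeIndexForBufferInfo:

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
        allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);
*/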
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator VMA_NOT_NULL allocator,
    const VmaPoolCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    size_t* VMA_NULLABLE pLostAllocationCount);

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    const char* VMA_NULLABLE * VMA_NOT_NULL ppName);

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    const char* VMA_NULLABLE pName);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
    const VmaAllocationCreateInfo* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pCreateInfo,
    size_t allocationCount,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
    VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator VMA_NOT_NULL allocator,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator VMA_NOT_NULL allocator,
    size_t allocationCount,
    const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize newSize);
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    void* VMA_NULLABLE pUserData);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    void* VMA_NULLABLE * VMA_NOT_NULL ppData);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize offset,
    VkDeviceSize size);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize offset,
    VkDeviceSize size);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t allocationCount,
    const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t allocationCount,
    const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
    const VmaPool VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(poolCount) pPools;

    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory;
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator VMA_NOT_NULL allocator,
    const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
    size_t allocationCount,
    VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
    const VmaDefragmentationInfo* VMA_NULLABLE pDefragmentationInfo,
    VmaDefragmentationStats* VMA_NULLABLE pDefragmentationStats);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
    const void* VMA_NULLABLE pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
    const void* VMA_NULLABLE pNext);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
    VmaAllocation VMA_NULLABLE allocation);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator VMA_NOT_NULL allocator,
    VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
    VmaAllocation VMA_NULLABLE allocation);
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
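
/*
Typical use (documented pattern, shown here for illustration): exactly one .cpp file
defines VMA_IMPLEMENTATION before including this header, so the code below the
interface is compiled once; every other file includes the header normally.

    // vma_usage.cpp
    #define VMA_IMPLEMENTATION
    #include "vk_mem_alloc.h"
*/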
// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdio>  // for snprintf
#include <cstdlib>
#include <cstring> // for memcpy, memmove, memset, strlen
#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

#if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
    #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
#endif
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus
    // Otherwise it's always 199711L, despite shared_mutex works since Visual Studio 2015 Update 2.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif
#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <cassert>   // for assert
#include <algorithm> // for min, max
#include <mutex>
#define VMA_NULL nullptr
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef NDEBUG
        #define VMA_ASSERT(expr)
    #else
        #define VMA_ASSERT(expr) assert(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef NDEBUG
        #define VMA_HEAVY_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #endif
#endif
#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif
#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif
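
/*
Example (illustrative): because every macro above is wrapped in #ifndef, a project can
inject its own primitives before compiling the implementation, e.g. (MyEngineAssert and
MyLog are hypothetical user functions):

    #define VMA_ASSERT(expr) MyEngineAssert(expr)
    #define VMA_DEBUG_LOG(format, ...) MyLog(format, __VA_ARGS__)
    #define VMA_IMPLEMENTATION
    #include "vk_mem_alloc.h"
*/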
#if VMA_STATS_STRING_ENABLED
static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
        bool TryLock() { return m_Mutex.try_lock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif
// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            bool TryLockRead() { return m_Mutex.try_lock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
            bool TryLockWrite() { return m_Mutex.try_lock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        // Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
            bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            bool TryLockRead() { return m_Mutex.TryLock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
            bool TryLockWrite() { return m_Mutex.TryLock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif
#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    /**
    Every allocation will have its own memory block.
    Define to 1 for debugging purposes only.
    */
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    /**
    Minimum alignment of all allocations, in bytes.
    Set to more than 1 for debugging purposes only. Must be power of two.
    */
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    /**
    Minimum margin before and after every allocation, in bytes.
    Set nonzero for debugging purposes only.
    */
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    /**
    Define this macro to 1 to automatically fill new allocations and destroyed
    allocations with some bit pattern.
    */
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    /**
    Define this macro to 1 together with nonzero VMA_DEBUG_MARGIN to enable writing
    magic value to the margin before and after every allocation and validating it,
    so that memory corruptions (out-of-bounds writes) are detected.
    */
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    /**
    Set this to 1 for debugging purposes only, to enable single mutex protecting all
    entry calls to the library. Can be useful for debugging multithreading issues.
    */
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    /**
    Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
    Set to more than 1 for debugging purposes only. Must be power of two.
    */
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif
#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif
#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif
static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

// Copies of some Vulkan definitions so we don't need to check their existence just to handle few constants.
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
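
/*
Worked example (not in the original source): VmaCountBitsSet is a SWAR popcount; each
step folds neighboring bit-fields together until the low 16 bits hold the total.
For v = 0x0000F00F (eight bits set):

    uint32_t bits = VmaCountBitsSet(0x0000F00Fu); // == 8
    // Equivalent check: std::bitset<32>(0x0000F00F).count() == 8
*/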
// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
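
/*
Worked example (illustrative): with align = 64,

    VmaAlignUp<VkDeviceSize>(193, 64)   == 256
    VmaAlignDown<VkDeviceSize>(193, 64) == 192

For power-of-two alignments this matches the bit-mask form (val + align - 1) & ~(align - 1).
*/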
// Returns true if given number is a power of 2. For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}

// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
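
/*
Worked example (illustrative): the "bit smearing" above rounds to powers of two:

    VmaNextPow2(17u) == 32,  VmaNextPow2(32u) == 32
    VmaPrevPow2(17u) == 16,  VmaPrevPow2(32u) == 32
*/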
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}
#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
// Custom non-STL quick sort, used as VMA_SORT if the macro was not defined above.
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
/*
Returns true if two memory blocks occupy overlapping pages.
ResourceA must be in less memory offset than ResourceB.

Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
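
/*
Worked example (illustrative), pageSize = 1024 (the masking above assumes a power of two):

    resource A at offset 0, size 1000 -> last byte 999, page 0
    resource B at offset 1000         -> page 0 => same page, returns true
    resource B at offset 1024         -> page 1 => different page, returns false
*/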
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
or linear image and another one is optimal image. If type is unknown, behave
conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#else
    // no-op
#endif
}
static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; // Just a placeholder.
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif
// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection or place where
new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
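
/*
Usage sketch (illustrative): VmaBinaryFindFirstNotLess is the library's equivalent of
std::lower_bound over a sorted range:

    VkDeviceSize arr[] = { 16, 32, 32, 64 };
    // Returns pointer to the first 32 (index 1):
    VkDeviceSize* p = VmaBinaryFindFirstNotLess(arr, arr + 4, (VkDeviceSize)32,
        [](VkDeviceSize a, VkDeviceSize b) { return a < b; });
*/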
template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
/*
Returns true if all pointers in the array are not-null and unique.
Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
T must be pointer type, e.g. VmaAllocation, VmaPool.
*/
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
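
/*
Usage sketch (illustrative): pushing an extension struct to the front of a Vulkan
pNext chain, as the allocator does with structs like VkMemoryDedicatedAllocateInfoKHR:

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    VkMemoryDedicatedAllocateInfoKHR dedicatedInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    VmaPnextChainPushFront(&allocInfo, &dedicatedInfo);
    // allocInfo.pNext == &dedicatedInfo
*/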
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}
static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)
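
/*
Usage sketch (illustrative): vma_new/vma_delete pair placement-new with memory from
the user-provided VkAllocationCallbacks, so the whole library can honor custom CPU
allocators:

    VmaMutex* m = vma_new(pAllocationCallbacks, VmaMutex)();
    vma_delete(pAllocationCallbacks, m);
*/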
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}

static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
    // value is unused.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};

template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
/* Class with interface similar to subset of std::vector and std::list.
It can use the small static array m_StaticArray when possible, or the heap-allocated
m_DynamicArray when the element count exceeds N. */
template<typename T, typename AllocatorT, size_t N>
class VmaSmallVector
{
public:
    typedef T value_type;

    VmaSmallVector(const AllocatorT& allocator) :
        m_Count(0),
        m_DynamicArray(allocator)
    {
    }
    VmaSmallVector(size_t count, const AllocatorT& allocator) :
        m_Count(count),
        m_DynamicArray(count > N ? count : 0, allocator)
    {
    }
    template<typename SrcT, typename SrcAllocatorT, size_t SrcN>
    VmaSmallVector(const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) = delete;
    template<typename SrcT, typename SrcAllocatorT, size_t SrcN>
    VmaSmallVector<T, AllocatorT, N>& operator=(const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) = delete;

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
    const T* data() const { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return data()[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return data()[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[m_Count - 1];
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        if(newCount > N && m_Count > N)
        {
            // Any direction, staying in m_DynamicArray.
            m_DynamicArray.resize(newCount, freeMemory);
        }
        else if(newCount > N && m_Count <= N)
        {
            // Growing, moving from m_StaticArray to m_DynamicArray.
            m_DynamicArray.resize(newCount, freeMemory);
            if(m_Count > 0)
            {
                memcpy(m_DynamicArray.data(), m_StaticArray, m_Count * sizeof(T));
            }
        }
        else if(newCount <= N && m_Count > N)
        {
            // Shrinking, moving from m_DynamicArray to m_StaticArray.
            if(newCount > 0)
            {
                memcpy(m_StaticArray, m_DynamicArray.data(), newCount * sizeof(T));
            }
            m_DynamicArray.resize(0, freeMemory);
        }
        else
        {
            // Any direction, staying in m_StaticArray - nothing to do here.
        }
        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        m_DynamicArray.clear(freeMemory);
        m_Count = 0;
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        T* const dataPtr = data();
        if(index < oldCount)
        {
            memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) * sizeof(T));
        }
        dataPtr[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            T* const dataPtr = data();
            memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        data()[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return data(); }
    iterator end() { return data() + m_Count; }

private:
    size_t m_Count;
    T m_StaticArray[N]; // Used when m_Count <= N.
    VmaVector<T, AllocatorT> m_DynamicArray; // Used when m_Count > N.
};
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    template<typename... Types> T* Alloc(Types... args);
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
            return result;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
    return result;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            ptr->~T(); // Explicit destructor call.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
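
/*
How the free list above works (illustrative walk-through, not in the original file):
each block threads an intrusive singly-linked list of free slots through the Item
union: FirstFreeIndex -> pItems[i].NextFreeIndex -> ... -> UINT32_MAX.
Alloc() pops the head in O(1); Free() pushes the slot back in O(1) once the owning
block is found. Blocks grow geometrically (Capacity * 3 / 2), so the number of blocks
stays logarithmic in the number of live objects.
*/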
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}
template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}
template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}
template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}
template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
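
/*
Design note (illustrative): VmaMap stores its pairs in a VmaVector kept sorted by key
and binary-searches it, trading O(n) insert/erase for zero per-node allocations - a
reasonable fit for the small maps the allocator needs when
VMA_USE_STL_UNORDERED_MAP == 0.
*/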
class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    // This struct is allocated using VmaPoolAllocator.
    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment{1},
        m_Size{0},
        m_pUserData{VMA_NULL},
        m_LastUseFrameIndex{currentFrameIndex},
        m_MemoryTypeIndex{0},
        m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
        m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
        m_MapCount{0},
        m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
    {
#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }
    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }
    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }
    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif
private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif

    void FreeUserDataString(VmaAllocator hAllocator);
};
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as
allocated memory block or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparators for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

/*
Parameters of planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
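
/*
Worked example (illustrative): with VMA_LOST_ALLOCATION_COST = 1 MiB, a candidate
request that overlaps 3 MiB of existing items and would make 2 of them lost costs

    CalcCost() = 3 MiB + 2 * 1 MiB = 5 MiB,

so the block-choosing code prefers a placement that evicts fewer/smaller allocations.
*/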
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    // Shouldn't modify blockCount.
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request must come from a successful call to CreateAllocationRequest.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};
#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    // For defragmentation only.
    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free. Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free. Merges it with adjacent free
    // suballocations if applicable. Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, inserts it into sorted list of m_FreeSuballocationsBySize if suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, removes it from sorted list of m_FreeSuballocationsBySize if suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
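
/*
Illustrative sketch (not part of the library): m_FreeSuballocationsBySize above is
kept sorted by size ascending, so the best-fit path of CreateAllocationRequest can
binary-search for the first free range that is big enough. A minimal standalone
analogue of that lookup, using std::lower_bound in place of the library's own
search helper (FindBestFit is a hypothetical name introduced here):

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Returns index of the smallest free range that can hold requiredSize,
    // or SIZE_MAX if none fits. freeSizes must be sorted ascending.
    static size_t FindBestFit(const std::vector<uint64_t>& freeSizes, uint64_t requiredSize)
    {
        auto it = std::lower_bound(freeSizes.begin(), freeSizes.end(), requiredSize);
        return it != freeSizes.end() ? size_t(it - freeSizes.begin()) : SIZE_MAX;
    }
*/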
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        // Suballocations in 2nd vector are created later than the ones in 1st, but they all have smaller offset.
        SECOND_VECTOR_RING_BUFFER,
        // Suballocations in 2nd vector are upper side of double stack.
        // They all have offsets higher than those in 1st vector.
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
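
/*
Illustrative sketch (not part of the library): when the second vector is in
SECOND_VECTOR_DOUBLE_STACK mode, the block effectively behaves like two stacks
growing toward each other. A conceptual standalone model of that bookkeeping,
assuming simple offset cursors (DoubleStack is a hypothetical type introduced here):

    #include <cstdint>

    // Lower stack grows up from 0, upper stack grows down from `size`.
    // An allocation fits only if the two cursors would not cross.
    struct DoubleStack
    {
        uint64_t size;
        uint64_t lowerTop = 0;  // one past the end of the lower stack
        uint64_t upperBottom;   // start of the upper stack
        explicit DoubleStack(uint64_t sz) : size(sz), upperBottom(sz) {}

        bool AllocLower(uint64_t sz, uint64_t& outOffset)
        {
            if(lowerTop + sz > upperBottom) return false;
            outOffset = lowerTop;
            lowerTop += sz;
            return true;
        }
        bool AllocUpper(uint64_t sz, uint64_t& outOffset)
        {
            if(upperBottom < sz || upperBottom - sz < lowerTop) return false;
            upperBottom -= sz;
            outOffset = upperBottom;
            return true;
        }
    };
*/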
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;
        // (Per-type payload of the node elided in this excerpt.)
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    struct
    {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level. node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level. node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
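
/*
Illustrative sketch (not part of the library): the buddy metadata maps an
allocation size to a tree level, where level 0 is the whole usable block and
each deeper level halves the node size (LevelToNodeSize above returns
m_UsableSize >> level). A standalone sketch of that mapping, mirroring the idea
of AllocSizeToLevel (AllocSizeToLevelSketch is a hypothetical name):

    #include <cstdint>

    // Deepest level whose node size (usableSize >> level) still holds allocSize.
    // maxLevels caps the descent.
    static uint32_t AllocSizeToLevelSketch(uint64_t usableSize, uint64_t allocSize, uint32_t maxLevels)
    {
        uint32_t level = 0;
        uint64_t nodeSize = usableSize;
        while(level + 1 < maxLevels && (nodeSize >> 1) >= allocSize)
        {
            nodeSize >>= 1;
            ++level;
        }
        return level; // e.g. usableSize=1024, allocSize=100 -> level 3 (node size 128)
    }
*/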
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        const VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        const VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it is not used by multiple threads simultaneously,
    e.g. vkMapMemory, vkBindBufferMemory. Also protects m_MapCount, m_pMappedData.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaDeviceMemoryBlock* pSrcBlock;
    VmaDeviceMemoryBlock* pDstBlock;
};

class VmaDefragmentationAlgorithm;
/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
Vulkan memory type.

Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void Free(const VmaAllocation hAllocation);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    // Saves results in pCtx->res.
    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        uint32_t flags,
        VmaDefragmentationStats* pStats);

    uint32_t ProcessDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves);

    void CommitDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationStats* pStats);

    ////////////////////////////////////////////////////////////////////////////////
    // To be used only while the m_Mutex is locked. Used during defragmentation.

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    bool m_HasEmptyBlock;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    void UpdateHasEmptyBlock();
};
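
/*
Illustrative sketch (not part of the library): each Vulkan memory type gets one
default VmaBlockVector, and each custom pool owns its own. From the public API
declared earlier in this header, that corresponds to vmaCreatePool. A hedged
usage sketch, assuming `allocator` and `memTypeIndex` already exist:

    VmaPoolCreateInfo poolInfo = {};
    poolInfo.memoryTypeIndex = memTypeIndex;  // e.g. chosen via vmaFindMemoryTypeIndex
    poolInfo.blockSize = 64ull * 1024 * 1024; // fixed 64 MiB blocks
    poolInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT; // linear metadata

    VmaPool pool = VK_NULL_HANDLE;
    VkResult res = vmaCreatePool(allocator, &poolInfo, &pool);
    // On success, allocations made with this pool go through its block vector.
*/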
struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo,
        VkDeviceSize preferredBlockSize);
    ~VmaPool_T();

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
#endif

private:
    uint32_t m_Id;
    char* m_Name;
};
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        bool freeOldAllocations);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    // Leave this structure for remaining empty space.
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    // This structure becomes invalid.
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }

                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
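
/*
Illustrative sketch (not part of the library): FreeSpaceDatabase::Fetch above
aligns the gap's start, places the request, and keeps the remainder only when it
stays above the registration threshold. A standalone walk-through of the same
arithmetic with concrete numbers (AlignUpPow2 is a hypothetical helper with the
same contract as the library's alignment helper for power-of-two alignments):

    #include <cassert>
    #include <cstdint>

    static uint64_t AlignUpPow2(uint64_t v, uint64_t a) { return (v + a - 1) & ~(a - 1); }

    int main()
    {
        uint64_t gapOffset = 100, gapSize = 200;      // free gap [100, 300)
        const uint64_t reqSize = 64, alignment = 64;

        const uint64_t dstOffset = AlignUpPow2(gapOffset, alignment); // 128
        assert(dstOffset + reqSize <= gapOffset + gapSize);           // fits

        const uint64_t consumed = (dstOffset - gapOffset) + reqSize;  // 92
        gapOffset += consumed;                                        // 192
        gapSize   -= consumed;                                        // 108 bytes remain
        return 0;
    }
*/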
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    uint32_t flags;
    VkBuffer hBuffer;
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VkResult res;
    bool mutexLocked;
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
    uint32_t defragmentationMovesProcessed;
    uint32_t defragmentationMovesCommitted;
    bool hasDefragmentationPlan;

    VmaBlockVectorDefragmentationContext(
        VmaAllocator hAllocator,
        VmaPool hCustomPool, // Optional.
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

private:
    const VmaAllocator m_hAllocator;
    // Null if not from custom pool.
    const VmaPool m_hCustomPool;
    // Redundant, for convenience not to fetch from m_hCustomPool->m_BlockVector or m_hAllocator->m_pBlockVectors.
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    // Used between constructor and Begin.
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};

struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        VmaAllocator hAllocator,
        uint32_t currFrameIndex,
        uint32_t flags,
        VmaDefragmentationStats* pStats);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, const VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        const VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    /*
    Returns:
    - `VK_SUCCESS` if succeeded and object can be destroyed immediately.
    - `VK_NOT_READY` if succeeded but the object must remain alive until vmaDefragmentationEnd().
    - Negative value if error occurred and object can be destroyed immediately.
    */
    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags);

    VkResult DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo);
    VkResult DefragmentPassEnd();

private:
    const VmaAllocator m_hAllocator;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    VmaDefragmentationStats* const m_pStats;

    VkDeviceSize m_MaxCpuBytesToMove;
    uint32_t m_MaxCpuAllocationsToMove;
    VkDeviceSize m_MaxGpuBytesToMove;
    uint32_t m_MaxGpuAllocationsToMove;

    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    VmaRecorder();
    VkResult Init(const VmaRecordSettings& settings, bool useMutex);
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled,
        bool deviceCoherentMemoryExtensionEnabled);
    ~VmaRecorder();

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationContext ctx);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        VmaDefragmentationContext ctx);
    void RecordSetPoolName(uint32_t frameIndex,
        VmaPool pool,
        const char* name);

private:
    struct CallParams
    {
        uint32_t threadId;
        double time;
    };

    class UserDataString
    {
    public:
        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
        const char* GetString() const { return m_Str; }

    private:
        char m_PtrStr[17];
        const char* m_Str;
    };

    bool m_UseMutex;
    VmaRecordFlags m_Flags;
    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
    void Flush();
};

#endif // #if VMA_RECORDING_ENABLED
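
/*
Illustrative sketch (not part of the library): the recorder above is driven from
the public API via record settings passed at allocator creation. A hedged usage
sketch, assuming `physicalDevice`, `device`, and `instance` already exist and the
header was compiled with VMA_RECORDING_ENABLED set to 1:

    VmaRecordSettings recordSettings = {};
    recordSettings.pFilePath = "vma_capture.csv";          // capture file
    recordSettings.flags = VMA_RECORD_FLUSH_AFTER_CALL_BIT;

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.instance = instance;
    allocatorInfo.pRecordSettings = &recordSettings;

    VmaAllocator allocator = VK_NULL_HANDLE;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
*/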
/*
Thread-safe wrapper over VmaPoolAllocator free list, for allocation of VmaAllocation_T objects.
*/
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    template<typename... Types> VmaAllocation Allocate(Types... args);
    void Free(VmaAllocation hAlloc);

private:
    VMA_MUTEX m_Mutex;
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
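
/*
Illustrative sketch (not part of the library): this per-heap accounting backs the
public budget query. A hedged usage sketch, assuming `allocator` exists; `usage`
and `budget` come from VK_EXT_memory_budget when available, otherwise from
internal estimates (needs <cstdio> for printf):

    VmaBudget budgets[VK_MAX_MEMORY_HEAPS] = {};
    vmaGetBudget(allocator, budgets);

    const VmaBudget& heap0 = budgets[0];
    printf("heap 0: blockBytes=%llu allocationBytes=%llu usage=%llu budget=%llu\n",
        (unsigned long long)heap0.blockBytes,
        (unsigned long long)heap0.allocationBytes,
        (unsigned long long)heap0.usage,
        (unsigned long long)heap0.budget);
*/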
// Main allocator object.
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    bool m_UseMutex;
    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation;
    bool m_UseKhrBindMemory2;
    bool m_UseExtMemoryBudget;
    bool m_UseAmdDeviceCoherentMemory;
    bool m_UseKhrBufferDeviceAddress;
    VkDevice m_hDevice;
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap, so cannot allocate more than the heap size.
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo);
    ~VmaAllocator_T();

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

    uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(
        VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationStats* pStats,
        VmaDefragmentationContext* pContext);
    VkResult DefragmentationEnd(
        VmaDefragmentationContext context);

    VkResult DefragmentationPassBegin(
        VmaDefragmentationPassInfo* pInfo,
        VmaDefragmentationContext context);
    VkResult DefragmentationPassEnd(
        VmaDefragmentationContext context);

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR.
    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkBuffer buffer,
        const void* pNext);
    // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR.
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkImage image,
        const void* pNext);

    VkResult BindBufferMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

    VkResult FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);
    VkResult FlushOrInvalidateAllocations(
        uint32_t allocationCount,
        const VmaAllocation* allocations,
        const VkDeviceSize* offsets, const VkDeviceSize* sizes,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    /*
    Returns bit mask of memory types that can support defragmentation on GPU as
    they support creation of required buffer for copy operations.
    */
    uint32_t GetGpuDefragmentationMemoryTypeBits();

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

    // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types.
    uint32_t m_GlobalMemoryTypeBits;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    void ImportVulkanFunctions_Static();
#endif

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    void ImportVulkanFunctions_Dynamic();
#endif

    void ValidateVulkanFunctions();

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool withinBudget,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void FreeDedicatedMemory(const VmaAllocation allocation);

    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

    uint32_t CalculateGlobalMemoryTypeBits() const;

    bool GetFlushOrInvalidateRange(
        VmaAllocation allocation,
        VkDeviceSize offset, VkDeviceSize size,
        VkMappedMemoryRange& outRange) const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
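
/*
Illustrative sketch (not part of the library): vma_delete above destroys the
object, then frees storage through the allocator's callbacks. The matching
construction pattern is placement new over raw storage from VmaAllocate. A hedged
sketch of that pattern (VmaNewObjectSketch is a hypothetical helper introduced
here; needs <new> and <utility>):

    template<typename T, typename... Args>
    static T* VmaNewObjectSketch(VmaAllocator hAllocator, Args&&... args)
    {
        T* ptr = VmaAllocate<T>(hAllocator);            // raw, suitably aligned storage
        return new(ptr) T(std::forward<Args>(args)...); // construct in place
    }
    // Pair each VmaNewObjectSketch call with vma_delete to run the destructor
    // and release storage through the same callbacks.
*/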
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char* p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char* p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
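
/*
Illustrative sketch (not part of the library): inside an object, keys and values
alternate, and BeginValue inserts ": " or ", " plus indentation automatically.
A minimal internal usage sketch, assuming `allocator` exists and stats strings
are enabled:

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();
        json.WriteString("Size"); // key
        json.WriteNumber(256u);   // value
        json.EndObject();
    }
    // sb now holds:
    // {
    //   "Size": 256
    // }
*/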
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
            ++mapRefCount;
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
        break;
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you are doing :)
    */
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting the last-use frame index atomic to VMA_FRAME_INDEX_LOST is enough
                // to mark the allocation as LOST. Calling code just needs to unregister it
                // in the owning VmaDeviceMemoryBlock.
                return true;
            }
        }
    }
}

#if VMA_STATS_STRING_ENABLED

static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}

void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
///////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata

VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
///////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Generic

VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}

bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations as calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations as calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in
    // m_FreeSuballocationsBySize, calculated from traversing their list.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Margin required between allocations - every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // Margin required between allocations - previous allocation must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
    // match expected one.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}

VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}

void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;

    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request: Early return.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    it,
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search starting from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.

        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}

uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
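// Converts the free suballocation chosen by CreateAllocationRequest() into a used
// one, splitting off any remaining space before and after it as new free
// suballocations so the free list stays exact.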
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert a new free suballocation after current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert a new free suballocation before current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
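// Checks whether an allocation of allocSize/allocAlignment/allocType can be placed
// starting at suballocItem. On success it fills *pOffset and, when canMakeOtherLost
// is used, the number and total size of allocations that would have to be made
// lost to free enough contiguous space.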
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that we have final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. *pOffset is already filled.
    return true;
}
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}

VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
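// m_FreeSuballocationsBySize holds iterators to free suballocations with size
// >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER, kept sorted by size so that
// best-fit search can use binary search. The two functions below maintain that
// invariant on every free/allocate.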
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }
}

void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty())
    {
        return false;
    }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it)
    {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
        {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
            {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}
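// VmaBlockMetadata_Linear keeps allocations in up to two vectors sorted by offset.
// The 2nd vector is either empty, a ring buffer (entries wrap around to low
// offsets before the start of the 1st vector), or an upper stack growing down
// from the end of the block (double stack). Freed items are only marked null and
// counted (m_1stNullItemsBeginCount etc.) until CleanupAfterFree() compacts the
// vectors.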
VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_SumFreeSize(0),
    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_1stVectorIndex(0),
    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
    m_1stNullItemsBeginCount(0),
    m_1stNullItemsMiddleCount(0),
    m_2ndNullItemsCount(0)
{
}

VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
{
}

void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}
bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty())
    {
        // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty())
    {
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i)
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree)
        {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else
        {
            ++nullItem1stCount;
        }

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; )
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}

VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();

    /*
    We don't consider gaps inside allocation vectors with freed allocations because
    they are not suitable for reuse in linear allocator. We consider only space that
    is available for new allocations.
    */
    if(IsEmpty())
    {
        return size;
    }

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode)
    {
    case SECOND_VECTOR_EMPTY:
        /*
        Available space is after end of 1st, as well as before first suballocation.
        */
        {
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }
        break;

    case SECOND_VECTOR_RING_BUFFER:
        /*
        Available space is only between end of 2nd and beginning of 1st.
        */
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }
        break;

    case SECOND_VECTOR_DOUBLE_STACK:
        /*
        Available space is only between end of 1st and top of 2nd.
        */
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }
        break;

    default:
        VMA_ASSERT(0);
        return 0;
    }
}
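// The three statistics walkers below all iterate the block the same way: first
// the 2nd vector in ring-buffer mode (it occupies the low offsets), then the 1st
// vector, then the 2nd vector in double-stack mode (it occupies the high offsets),
// visiting used ranges and the free gaps between them in offset order.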
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    outInfo.blockCount = 1;
    outInfo.allocationCount = (uint32_t)GetAllocationCount();
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.unusedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                }

                // Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);

                // Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                }

                // End of loop.
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }

            outInfo.usedBytes += suballoc.size;
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }

            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex past the beginning.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                }

                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                }

                lastOffset = size;
            }
        }
    }
}
void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // Process this allocation.
                ++inoutStats.allocationCount;

                // Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // End of loop.
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            ++inoutStats.allocationCount;

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                ++inoutStats.allocationCount;

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                lastOffset = size;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: Count allocations and unused ranges.

    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;

    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }

                ++alloc2ndCount;
                usedBytes += suballoc.size;

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    ++unusedRangeCount;
                }

                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                ++unusedRangeCount;
            }

            ++alloc1stCount;
            usedBytes += suballoc.size;

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < size)
            {
                ++unusedRangeCount;
            }

            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }

                ++alloc2ndCount;
                usedBytes += suballoc.size;

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    ++unusedRangeCount;
                }

                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);

    // SECOND PASS: Write the JSON entries in the same iteration order.
    lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }

            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }

            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED
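// Allocation requests in linear metadata are split by direction: upper-address
// requests go to the top stack of the double stack; everything else is appended
// at the end of the 1st vector, or wraps around into the 2nd vector when the
// block is used as a ring buffer.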
bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());
    return upperAddress ?
        CreateAllocationRequest_UpperAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
        CreateAllocationRequest_LowerAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}
bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
        return false;
    }

    // Try to allocate before 2nd.back(), or end of block if 2nd.empty().
    if(allocSize > size)
    {
        return false;
    }
    VkDeviceSize resultBaseOffset = size - allocSize;
    if(!suballocations2nd.empty())
    {
        const VmaSuballocation& lastSuballoc = suballocations2nd.back();
        resultBaseOffset = lastSuballoc.offset - allocSize;
        if(allocSize > lastSuballoc.offset)
        {
            return false;
        }
    }

    // Start from offset equal to end of free space.
    VkDeviceSize resultOffset = resultBaseOffset;

    // Apply VMA_DEBUG_MARGIN at the end.
    if(VMA_DEBUG_MARGIN > 0)
    {
        if(resultOffset < VMA_DEBUG_MARGIN)
        {
            return false;
        }
        resultOffset -= VMA_DEBUG_MARGIN;
    }

    // Apply alignment.
    resultOffset = VmaAlignDown(resultOffset, allocAlignment);

    // Check next suballocations from 2nd for BufferImageGranularity conflicts.
    // Increase alignment if necessary.
    if(bufferImageGranularity > 1 && !suballocations2nd.empty())
    {
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
        {
            const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
            if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
            {
                if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
                {
                    bufferImageGranularityConflict = true;
                    break;
                }
            }
            else
            {
                // Already on previous page.
                break;
            }
        }
        if(bufferImageGranularityConflict)
        {
            resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
        }
    }

    // There is enough free space.
    const VkDeviceSize endOf1st = !suballocations1st.empty() ?
        suballocations1st.back().offset + suballocations1st.back().size :
        0;
    if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
    {
        // Check previous suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
            }
        }

        // All tests passed: Success.
        pAllocationRequest->offset = resultOffset;
        pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
        pAllocationRequest->sumItemSize = 0;
        // pAllocationRequest->item unused.
        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
        return true;
    }

    return false;
}
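// Lower-address path: first try to append at the end of the 1st vector; if the
// block is empty or already used as a ring buffer, try to append at the end of
// the 2nd vector instead, stopping before the first live item of the 1st vector.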
bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        // Try to allocate at the end of 1st vector.

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations1st.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
            {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
                {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on previous page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            // pAllocationRequest->item unused.
            pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }

    // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
    // beginning of 1st vector.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost)
        {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
            {
                // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    // No problem.
                }
                else
                {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else
                    {
                        return false;
                    }
                }
                ++index1st;
            }

            // Check next suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, we must mark more allocations lost or fail.
            if(bufferImageGranularity > 1)
            {
                while(index1st < suballocations1st.size())
                {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
                    {
                        if(suballoc.hAllocation != VK_NULL_HANDLE)
                        {
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                            {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else
                            {
                                return false;
                            }
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                    ++index1st;
                }
            }

            // Special case: There is not enough room at the end for this allocation, even after making all from the 1st lost.
            if(index1st == suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
            {
                VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
                return false;
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1)
            {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++)
                {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
            // pAllocationRequest->item unused.
            return true;
        }
    }

    return false;
}
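// Making requested allocations lost in linear metadata: victims are retired
// front-to-back starting at the first live item of the 1st vector, switching
// over to the 2nd vector when the block is used as a ring buffer.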
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0)
    {
        return true;
    }

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    // We always start from 1st.
    SuballocationVectorType* suballocations = &AccessSuballocations1st();
    size_t index = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
    {
        if(index == suballocations->size())
        {
            index = 0;
            // If we get to the end of 1st, we wrap around to the beginning of 2nd.
            if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                suballocations = &AccessSuballocations2nd();
            }
            // else: m_2ndVectorMode == SECOND_VECTOR_EMPTY:
            // suballocations continues pointing at AccessSuballocations1st().
            VMA_ASSERT(!suballocations->empty());
        }
        VmaSuballocation& suballoc = (*suballocations)[index];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                if(suballocations == &AccessSuballocations1st())
                {
                    ++m_1stNullItemsMiddleCount;
                }
                else
                {
                    ++m_2ndNullItemsCount;
                }
                ++madeLostCount;
            }
            else
            {
                return false;
            }
        }
        ++index;
    }

    CleanupAfterFree();

    return true;
}
uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount)
    {
        CleanupAfterFree();
    }

    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type)
    {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();

            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());

            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector.
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode)
            {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}
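// Freeing in the linear allocator is O(1) for the common cases (first live item
// of the 1st vector, last item of either vector); otherwise the item is found by
// binary search over offsets and only marked as null here, to be compacted later
// by CleanupAfterFree().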
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}

void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty())
    {
        // First allocation: Mark it as next empty at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset)
        {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
    {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaBinaryFindSorted(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc,
            VmaSuballocationOffsetLess());
        if(it != suballocations1st.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
    {
        // Item from the middle of 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
        if(it != suballocations2nd.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
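// Note on the heuristic above: compaction kicks in only once the 1st vector has
// more than 32 entries and null items reach 1.5x the live ones; e.g. 48 null vs
// 30 live items (96 >= 90) triggers it.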
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty())
    {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else
    {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
        {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            VmaVectorRemove(suballocations2nd, 0);
        }

        if(ShouldCompact1st())
        {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
            {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
                {
                    ++srcIndex;
                }
                if(dstIndex != srcIndex)
                {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty())
        {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
        {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
                {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}
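// VmaBlockMetadata_Buddy implements a classic buddy allocator: the usable size
// is rounded down to a power of two and managed as a binary tree of nodes, where
// nodes at level N have size m_UsableSize >> N. Free nodes of each level form a
// doubly-linked list in m_FreeList[level].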
VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}

VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}

void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
    {
        ++m_LevelCount;
    }

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}
bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next)
        {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL)
            {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else
            {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists at unused levels are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}
VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        if(m_FreeList[level].front != VMA_NULL)
        {
            return LevelToNodeSize(level);
        }
    }
    return 0;
}

void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    outInfo.blockCount = 1;
    outInfo.allocationCount = 0;
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.unusedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0)
    {
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
    }
}
void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0)
    {
        ++inoutStats.unusedRangeCount;
        // Not updating unusedRangeSizeMax with unusableSize because this space is not available for allocations.
    }
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Respect bufferImageGranularity the simple way: inflate alignment and size
    // whenever the resource may be an image.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
    {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}

bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    /*
    Lost allocations are not supported in buddy allocator at the moment.
    Support might be added in the future.
    */
    return pAllocationRequest->itemsToMakeLostCount == 0;
}

uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    /*
    Lost allocations are not supported in buddy allocator at the moment.
    Support might be added in the future.
    */
    return 0;
}
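// Alloc() walks down from the free node found by CreateAllocationRequest(),
// splitting nodes in half (creating buddy pairs) until the target level is
// reached, then converts the final node into an allocation node.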
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down, splitting free nodes until the target level is reached.
    while(currLevel < targetLevel)
    {
        // currNode is the first free node at currLevel - remove it from the free list.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes (buddies).
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to the free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
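// Worked example of the split cascade above (illustrative): allocating 40 B
// from an empty 256 B block with targetLevel == 2 (64 B nodes):
//
//   level 0: [        256 free        ]  -> split
//   level 1: [ 128 free  |  128 free  ]  -> left child split again
//   level 2: [ 64 ALLOC | 64 free ] [ 128 free remains at level 1 ]
//
// Each loop iteration removes the free node from its level's list, creates two
// buddy children, and continues with the left child until targetLevel is reached.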
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }

    vma_delete(GetAllocationCallbacks(), node);
}
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev and next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    // Find the lowest (deepest) level whose node size still fits allocSize.
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
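// A minimal, self-contained sketch of the level computation above (an
// illustration, not part of the library). It assumes a usable block size that
// is a power of 2; the function name and parameters are hypothetical.
#if 0
#include <cstdint>

static uint32_t ExampleAllocSizeToLevel(uint64_t usableSize, uint32_t levelCount, uint64_t allocSize)
{
    uint32_t level = 0;
    uint64_t currLevelNodeSize = usableSize;        // Node size at level 0 = whole block.
    uint64_t nextLevelNodeSize = currLevelNodeSize >> 1;
    // Descend while the next, smaller node still fits the request.
    while(allocSize <= nextLevelNodeSize && level + 1 < levelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}

// ExampleAllocSizeToLevel(256, 8, 40) == 2:
//   40 <= 128 -> level 1, 40 <= 64 -> level 2, 40 > 32 -> stop (64 B node).
#endif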
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find the node and its level by walking down from the root.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT)
    {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize)
        {
            node = node->split.leftChild;
        }
        else
        {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free buddy nodes going up, as long as possible.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
    {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}
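// Worked example of the merge loop above (illustrative): if the 64 B node from
// the Alloc() example is freed while its 64 B buddy is also free, the pair
// collapses into their 128 B parent; if that parent's 128 B buddy is free too,
// they collapse again into the 256 B root. The loop stops at the first level
// whose buddy is still split or allocated, and the surviving node is pushed to
// the front of that level's free list.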
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += levelNodeSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            ++outInfo.allocationCount;
            outInfo.usedBytes += allocSize;

            // The part of the node not covered by the allocation counts as unused.
            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else
    {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}
void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // Unlink from the previous node, or update the list head.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // Unlink from the next node, or update the list tail.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}

void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}
bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        // Already mapped - just bump the reference count and hand out the cached pointer.
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
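// Memory layout checked above, when VMA_DEBUG_MARGIN > 0 (illustrative):
//
//   | ... | magic pattern (margin) | allocation data | magic pattern (margin) | ... |
//         ^ allocOffset - VMA_DEBUG_MARGIN           ^ allocOffset + allocSize
//
// WriteMagicValueAroundAllocation() fills both margins with the magic pattern
// when the allocation is created; this function re-reads them on free, so any
// out-of-bounds write into either margin is reported before the allocation
// disappears.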
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}
VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // Same locking rationale as in BindBufferMemory above.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
static void VmaInitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
{
    inoutInfo.blockCount += srcInfo.blockCount;
    inoutInfo.allocationCount += srcInfo.allocationCount;
    inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
    inoutInfo.usedBytes += srcInfo.usedBytes;
    inoutInfo.unusedBytes += srcInfo.unusedBytes;
    inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
    inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
    inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}
VmaPool_T::~VmaPool_T()
{
}

void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}
#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}
VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}
bool VmaBlockVector::IsEmpty()
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    return m_Blocks.empty();
}

bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;

VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;
    // If the linear algorithm is used as a ring buffer, allocations can become
    // lost only when the vector consists of a single block.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with the linear allocator and a single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: the requested allocation cannot fit into any block of this vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to succeed without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only the last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock, currentFrameIndex, size, alignment,
                    allocFlagsCopy, createInfo.pUserData, suballocType, strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock, currentFrameIndex, size, alignment,
                        allocFlagsCopy, createInfo.pUserData, suballocType, strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock, currentFrameIndex, size, alignment,
                        allocFlagsCopy, createInfo.pUserData, suballocType, strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
        }
        // 2. Try to create a new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for the new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock, currentFrameIndex, size, alignment,
                    allocFlagsCopy, createInfo.pUserData, suballocType, strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from the new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }
    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size, alignment, isUpperAddress, suballocType,
                        canMakeOtherLost, strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size, alignment, isUpperAddress, suballocType,
                        canMakeOtherLost, strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // Allocate from this block.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    UpdateHasEmptyBlock();
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment, size, m_MemoryTypeIndex, suballocType, mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: some allocations were touched in the meantime - next try.
            }
            else
            {
                // Could not find a place in any of the blocks - break the outer loop.
                break;
            }
        }

        // No luck in any of VMA_ALLOCATION_TRY_COUNT attempts - give up.
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
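// Worked example of the sizing heuristic in step 2 above (illustrative): the
// shift budget NEW_BLOCK_SIZE_SHIFT_MAX = 3 is shared by both loops. In an
// empty vector with m_PreferredBlockSize = 256 MiB and a 4 MiB request, the
// pre-shrink loop halves the candidate to 128, 64, then 32 MiB (each still
// larger than any existing block and >= 2 * size), so the first block starts
// small. If full-size blocks already exist, the pre-shrink does nothing and the
// budget is spent in the retry loop instead, re-calling CreateBlock() with 128,
// 64, then 32 MiB when vkAllocateMemory keeps failing, before giving up.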
void VmaBlockVector::Free(const VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // We already have an empty block - we don't want two, so delete this one.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // else: we now have one empty block - leave it as a cache.
        }
        // pBlock didn't become empty, but we already have another empty block - free that one.
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
            }
        }

        UpdateHasEmptyBlock();
        IncrementallySortBlocks();
    }

    // Destruction of a free block, deferred until this point, outside of the
    // mutex lock, for performance reasons.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}
void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}
void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
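// A single swap per call is intentional: Free() calls this after every
// deallocation, so the vector converges toward ascending GetSumFreeSize()
// order over successive frees, one bubble-sort step at a time, without paying
// for a full sort while the mutex is held.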
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex,
        m_FrameInUseCount,
        m_BufferImageGranularity,
        size, alignment, isUpperAddress, suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // Allocate from this block.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        UpdateHasEmptyBlock();
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment, size, m_MemoryTypeIndex, suballocType, mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;

#if VMA_BUFFER_DEVICE_ADDRESS
    // Every standalone block can potentially contain a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT - always enable the feature.
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_hAllocator->m_UseKhrBufferDeviceAddress)
    {
        allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
        VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created - create a new block for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get a mapped pointer, or map the block if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do the actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE THE ACTUAL DATA COPY HAPPENS.
            memcpy(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
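// Alignment example for the mapped-memory ranges above (illustrative): with
// nonCoherentAtomSize = 64, move.srcOffset = 100 and move.size = 20, the
// invalidated range becomes offset = VmaAlignDown(100, 64) = 64 and
// size = VmaAlignUp(20 + (100 - 64), 64) = 64, clamped to the end of the
// block. vkInvalidateMappedMemoryRanges/vkFlushMappedMemoryRanges require this
// rounding on non-coherent memory; for coherent memory both calls are skipped.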
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind a buffer for the whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to the command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Save the buffers to the defrag context for later destruction.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}
void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                break;
            }
        }
    }
    UpdateHasEmptyBlock();
}
void VmaBlockVector::UpdateHasEmptyBlock()
{
    m_HasEmptyBlock = false;
    for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = true;
            break;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
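// The writer above produces JSON of roughly this shape for a custom pool
// (illustrative; the exact fields depend on the pool configuration):
//
//   {
//     "MemoryTypeIndex": 2,
//     "BlockSize": 268435456,
//     "BlockCount": { "Min": 1, "Cur": 3 },
//     "Algorithm": "Linear",
//     "Blocks": { "0": { ... per-block detailed map ... } }
//   }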
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
            {
                if(!m_Mutex.TryLockWrite())
                {
                    pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
                    return;
                }
            }
            else
            {
                m_Mutex.LockWrite();
                pCtx->mutexLocked = true;
            }
        }

        pCtx->Begin(overlappingMoveSupported, flags);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
        {
            if(m_hAllocator->m_UseMutex)
                m_Mutex.UnlockWrite();

            if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
                pCtx->res = VK_NOT_READY;

            return;
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
            }
        }
    }
}
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    uint32_t flags,
    VmaDefragmentationStats* pStats)
{
    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL && m_hAllocator->m_UseMutex)
    {
        VMA_ASSERT(pCtx->mutexLocked == false);

        // Incremental defragmentation doesn't hold the lock, so take it here to
        // finish the cleanup safely.
        m_Mutex.LockWrite();
        pCtx->mutexLocked = true;
    }

    // If the mutex isn't locked we didn't do any work and there is nothing to delete.
    if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
    {
        // Destroy buffers.
        for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
        {
            VmaBlockDefragmentationContext &blockCtx = pCtx->blockContexts[blockIndex];
            if(blockCtx.hBuffer)
            {
                (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            FreeEmptyBlocks(pStats);
        }
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
uint32_t VmaBlockVector::ProcessDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    const uint32_t moveCount = std::min(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);

    for(uint32_t i = 0; i < moveCount; ++ i)
    {
        VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];

        pMove->allocation = move.hAllocation;
        pMove->memory = move.pDstBlock->GetDeviceMemory();
        pMove->offset = move.dstOffset;

        ++ pMove;
    }

    pCtx->defragmentationMovesProcessed += moveCount;

    return moveCount;
}
void VmaBlockVector::CommitDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationStats* pStats)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
    {
        const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];

        move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
        move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
    }

    pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
    FreeEmptyBlocks(pStats);
}
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}
bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }

    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create a block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}
VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now, as we are inside VmaBlockVector::m_Mutex, we can make a final check whether this allocation was lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    bool freeOldAllocations)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    // This is a choice based on research.
    uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;

    size_t srcBlockMinIndex = 0;

    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find the next allocation to move:
        // 1.1. Walk m_Blocks from last to first - they are sorted from most "destination" to most "source".
        // 1.2. Then walk m_Allocations from last to first.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find a new place for this allocation in a preceding or the current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                strategy,
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached the limit on the number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move = {};
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                move.hAllocation = allocInfo.m_hAllocation;
                move.pSrcBlock = pSrcBlockInfo->m_pBlock;
                move.pDstBlock = pDstBlockInfo->m_pBlock;

                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);

                if(freeOldAllocations)
                {
                    pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
                    allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
                }

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pBlockInfo->m_Allocations for the next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        // This is a choice based on research.
        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks this time by the main criterion, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // This is a choice based on research.
    const uint32_t roundCount = 2;

    // Execute defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove, !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL));
    }

    return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
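// The comparison above is lexicographic on (blockIndex, offset): moving to an
// earlier block always makes sense, moving to a later block never does, and
// within the same block only a move toward a lower offset counts as progress.
// This is what drives allocations toward the front blocks during a round.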
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}

VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most "destination".

    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
    });

    // THE MAIN ALGORITHM

    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;

    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            VmaDefragmentationMove move = {};
            // Try to place it in one of the free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block.
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
14197 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
14200 while(dstBlockInfoIndex < srcBlockInfoIndex &&
14201 dstAllocOffset + srcAllocSize > dstBlockSize)
14204 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
14206 ++dstBlockInfoIndex;
14207 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14208 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14209 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14210 dstBlockSize = pDstMetadata->GetSize();
14212 dstAllocOffset = 0;
14216 if(dstBlockInfoIndex == srcBlockInfoIndex)
14218 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14220 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
14222 bool skipOver = overlap;
14223 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
14227 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
14232 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
14234 dstOffset = srcAllocOffset + srcAllocSize;
14240 srcSuballocIt->offset = dstAllocOffset;
14241 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
14242 dstOffset = dstAllocOffset + srcAllocSize;
14243 m_BytesMoved += srcAllocSize;
14244 ++m_AllocationsMoved;
14247 move.srcBlockIndex = srcOrigBlockIndex;
14248 move.dstBlockIndex = dstOrigBlockIndex;
14249 move.srcOffset = srcAllocOffset;
14250 move.dstOffset = dstAllocOffset;
14251 move.size = srcAllocSize;
14253 moves.push_back(move);
            VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
            VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

            VmaSuballocation suballoc = *srcSuballocIt;
            suballoc.offset = dstAllocOffset;
            suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
            dstOffset = dstAllocOffset + srcAllocSize;
            m_BytesMoved += srcAllocSize;
            ++m_AllocationsMoved;

            VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
            pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
            srcSuballocIt = nextSuballocIt;

            pDstMetadata->m_Suballocations.push_back(suballoc);

            move.srcBlockIndex = srcOrigBlockIndex;
            move.dstBlockIndex = dstOrigBlockIndex;
            move.srcOffset = srcAllocOffset;
            move.dstOffset = dstAllocOffset;
            move.size = srcAllocSize;

            moves.push_back(move);

    m_BlockInfos.clear();

    PostprocessMetadata();
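/*
Illustration (not part of the library): each VmaDefragmentationMove pushed to
`moves` above fully describes one relocation - source block index, destination
block index, both byte offsets, and the size. A consumer that wants to perform
such a move on the GPU could, assuming the application has buffers bound over
the affected memory blocks, translate it into a single region copy:

    VkBufferCopy region = {};
    region.srcOffset = move.srcOffset;
    region.dstOffset = move.dstOffset;
    region.size = move.size;
    vkCmdCopyBuffer(commandBuffer, srcBlockBuffer, dstBlockBuffer, 1, &region);

Here `srcBlockBuffer` and `dstBlockBuffer` are hypothetical application-side
buffers covering the blocks identified by move.srcBlockIndex and
move.dstBlockIndex.
*/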
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                pMetadata->m_Suballocations.erase(it);
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - the entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}

void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
    // Find the first suballocation placed at or after suballoc.offset and insert before it.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end())
    {
        if(it->offset < suballoc.offset)
        {
            ++it;
        }
        else
        {
            break;
        }
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
    defragmentationMovesProcessed(0),
    defragmentationMovesCommitted(0),
    hasDefragmentationPlan(0),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}
VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}
void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
        (flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL) == 0)
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}

VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, const VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        // Pools with an algorithm other than the default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator, pool, &pool->m_BlockVector, m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    const VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentation contexts. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // Dedicated and lost allocations cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator, hAllocPool, &hAllocPool->m_BlockVector, m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to a default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator, VK_NULL_HANDLE, m_hAllocator->m_pBlockVectors[memTypeIndex], m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags)
{
    m_MaxCpuBytesToMove = maxCpuBytesToMove;
    m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
    m_MaxGpuBytesToMove = maxGpuBytesToMove;
    m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;

    if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
        m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
    {
        return VK_NOT_READY;
    }

    // Without a command buffer, GPU-side moves are impossible.
    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;
    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx, pStats, flags,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx, pStats, flags,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }
    return res;
}

VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo)
{
    VmaDefragmentationPassMoveInfo* pCurrentMove = pInfo->pMoves;
    uint32_t movesLeft = pInfo->moveCount;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                pBlockVectorCtx->GetBlockVector()->Defragment(
                    pBlockVectorCtx, m_pStats, m_Flags,
                    m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                    m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                    VK_NULL_HANDLE);
                if(pBlockVectorCtx->res < VK_SUCCESS)
                    continue;

                pBlockVectorCtx->hasDefragmentationPlan = true;
            }

            const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
                pBlockVectorCtx,
                pCurrentMove, movesLeft);

            movesLeft -= processed;
            pCurrentMove += processed;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx, m_pStats, m_Flags,
                m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                VK_NULL_HANDLE);
            if(pBlockVectorCtx->res < VK_SUCCESS)
                continue;

            pBlockVectorCtx->hasDefragmentationPlan = true;
        }

        const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
            pBlockVectorCtx,
            pCurrentMove, movesLeft);

        movesLeft -= processed;
        pCurrentMove += processed;
    }
VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
{
    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                res = VK_NOT_READY;
                continue;
            }

            pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
                pBlockVectorCtx, m_pStats);

            if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
                res = VK_NOT_READY;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            res = VK_NOT_READY;
            continue;
        }

        pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
            pBlockVectorCtx, m_pStats);

        if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
            res = VK_NOT_READY;
    }

    return res;
}
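/*
Usage sketch (illustrative, not part of the library): the incremental
defragmentation implemented by DefragmentPassBegin/DefragmentPassEnd above is
driven from the application through vmaBeginDefragmentationPass and
vmaEndDefragmentationPass. Assuming `allocator` and a filled
VmaDefragmentationInfo2 `defragInfo`, one possible loop:

    VmaDefragmentationContext defragCtx;
    vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);

    for(;;)
    {
        VmaDefragmentationPassInfo passInfo = {};
        passInfo.moveCount = MY_MAX_MOVES_PER_PASS; // hypothetical application limit
        passInfo.pMoves = myMoveArray;              // hypothetical array of that size
        if(vmaBeginDefragmentationPass(allocator, defragCtx, &passInfo) == VK_SUCCESS)
            break; // Nothing left to move.
        // Copy data for passInfo.moveCount moves here (e.g. on a transfer queue)
        // and wait for the copies to finish before ending the pass.
        if(vmaEndDefragmentationPass(allocator, defragCtx) == VK_SUCCESS)
            break;
    }

    vmaDefragmentationEnd(allocator, defragCtx);
*/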
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_RecordingStartTime(std::chrono::high_resolution_clock::now())
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

#if defined(_WIN32)
    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
#else
    // Open file for writing.
    m_File = fopen(settings.pFilePath, "wb");
    if(!m_File)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
#endif

    // Write header: file format magic and version.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,8");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}

void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex, createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount, (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags, createInfo.usage,
        createInfo.requiredFlags, createInfo.preferredFlags, createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
}

void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags, createInfo.usage,
        createInfo.requiredFlags, createInfo.preferredFlags, createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags, createInfo.usage,
        createInfo.requiredFlags, createInfo.preferredFlags, createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
}

void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags, createInfo.usage,
        createInfo.requiredFlags, createInfo.preferredFlags, createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
}

void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
}

void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
}

void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, offset, size);
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, offset, size);
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags, allocCreateInfo.preferredFlags, allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
}

void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags, allocCreateInfo.preferredFlags, allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
}

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
}

void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    PrintPointerList(info.poolCount, info.pPools);
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
}

void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
}

VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            // If the user data is an opaque pointer, record its textual representation.
            snprintf(m_PtrStr, 17, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled,
    bool deviceCoherentMemoryExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
15406 void VmaRecorder::GetBasicParams(CallParams& outParams)
15408 #if defined(_WIN32)
15409 outParams.threadId = GetCurrentThreadId();
15414 std::thread::id thread_id = std::this_thread::get_id();
15415 stringstream thread_id_to_string_converter;
15416 thread_id_to_string_converter << thread_id;
15417 string thread_id_as_string = thread_id_to_string_converter.str();
15418 outParams.threadId =
static_cast<uint32_t
>(std::stoi(thread_id_as_string.c_str()));
15421 auto current_time = std::chrono::high_resolution_clock::now();
15423 outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}

void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_GlobalMemoryTypeBits(UINT32_MAX)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        // These extensions are promoted to Vulkan 1.1 - no need to enable them separately.
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // The margin must be a multiple of sizeof(uint32_t), because the magic value is written as uint32_t.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if !(VMA_BUFFER_DEVICE_ADDRESS)
    if(m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
    }
#endif
#if VMA_VULKAN_VERSION < 1002000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0,
            SIZE_MAX,
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm

        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}

VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL)
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget,
            m_UseAmdDeviceCoherentMemory);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
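/*
Typical creation call reaching the constructor above (illustrative):

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.vulkanApiVersion = VK_API_VERSION_1_1; // or leave 0 for Vulkan 1.0
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.instance = instance;

    VmaAllocator allocator;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);

physicalDevice, device and instance are assumed to be valid Vulkan handles
created earlier by the application.
*/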
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Static();
#endif

    if(pVulkanFunctions != VMA_NULL)
    {
        ImportVulkanFunctions_Custom(pVulkanFunctions);
    }

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Dynamic();
#endif

    ValidateVulkanFunctions();
}
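/*
Illustration: an application that wants to supply its own function pointers
(for example fetched through volk or another loader) fills VmaVulkanFunctions
and passes it via VmaAllocatorCreateInfo::pVulkanFunctions, which ends up in
ImportVulkanFunctions_Custom above. A partial sketch:

    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
    vulkanFunctions.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;
    vulkanFunctions.vkAllocateMemory = vkAllocateMemory;
    vulkanFunctions.vkFreeMemory = vkFreeMemory;
    // ... remaining members filled the same way ...

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.pVulkanFunctions = &vulkanFunctions;
*/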
#if VMA_STATIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Static()
{
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;

#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
        m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
        m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
    }
#endif
}

#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions)
{
    VMA_ASSERT(pVulkanFunctions != VMA_NULL);

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
    VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
    VMA_COPY_IF_NOT_NULL(vkFreeMemory);
    VMA_COPY_IF_NOT_NULL(vkMapMemory);
    VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
    VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
    VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
    VMA_COPY_IF_NOT_NULL(vkCreateImage);
    VMA_COPY_IF_NOT_NULL(vkDestroyImage);
    VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif

#if VMA_MEMORY_BUDGET
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif

#undef VMA_COPY_IF_NOT_NULL
}
#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
{
#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);

    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
    VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
    VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
    VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
    VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
    VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
    VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
    VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
    VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
    VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
    VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
    VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");

#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
    }
#endif

#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
    }
#endif // #if VMA_BIND_MEMORY2

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget && m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif // #if VMA_MEMORY_BUDGET

#undef VMA_FETCH_DEVICE_FUNC
#undef VMA_FETCH_INSTANCE_FUNC
}

#endif // #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
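/*
Configuration note (illustrative): the dynamic path above is selected at
compile time. An application that links Vulkan dynamically would typically
include this header as:

    #define VMA_STATIC_VULKAN_FUNCTIONS 0
    #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
    #define VMA_IMPLEMENTATION
    #include "vk_mem_alloc.h"

With these settings only vkGetInstanceProcAddr/vkGetDeviceProcAddr need to be
resolvable at link time; everything else is fetched by
ImportVulkanFunctions_Dynamic.
*/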
void VmaAllocator_T::ValidateVulkanFunctions()
{
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif

#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
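/*
Worked example (assuming the default VMA_SMALL_HEAP_MAX_SIZE of 1 GiB and the
default preferred large-heap block size of 256 MiB):

    heap size 256 MiB -> small heap -> block size 256/8 = 32 MiB
    heap size 8 GiB   -> large heap -> block size 256 MiB

The final VmaAlignUp(..., 32) only rounds the result up to a multiple of
32 bytes, which is a no-op for sizes like these.
*/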
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If the memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristics: prefer dedicated memory when the request exceeds half of the preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        return AllocateDedicatedMemory(
            size, suballocType, memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer, dedicatedBufferUsage, dedicatedImage,
            allocationCount, pAllocations);
    }

    VkResult res = blockVector->Allocate(
        m_CurrentFrameIndex.load(),
        size, alignment, finalCreateInfo, suballocType,
        allocationCount, pAllocations);
    if(res == VK_SUCCESS)
    {
        return res;
    }

    // Block allocation failed - try dedicated memory as a fallback.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    res = AllocateDedicatedMemory(
        size, suballocType, memTypeIndex,
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
        finalCreateInfo.pUserData,
        dedicatedBuffer, dedicatedBufferUsage, dedicatedImage,
        allocationCount, pAllocations);
    if(res == VK_SUCCESS)
    {
        VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
        return VK_SUCCESS;
    }
    // Everything failed: return the error code.
    VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

#if VMA_BUFFER_DEVICE_ADDRESS
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_UseKhrBufferDeviceAddress)
    {
        bool canContainBufferWithDeviceAddress = true;
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX || // Usage unknown.
                (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = false;
        }
        if(canContainBufferWithDeviceAddress)
        {
            allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
            VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
        }
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size, suballocType, memTypeIndex, allocInfo,
            map, isUserDataString, pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register the allocations just made.
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free the allocations already created before the failure.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
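/*
Illustration: a dedicated allocation like the one created above can be
requested explicitly by the application:

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

    VkBuffer buffer;
    VmaAllocation allocation;
    vmaCreateBuffer(allocator, &bufferCreateInfo, &allocCreateInfo, &buffer, &allocation, VMA_NULL);

bufferCreateInfo is assumed to be a filled VkBufferCreateInfo; each such
allocation receives its own VkDeviceMemory instead of a piece of a larger
block.
*/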
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}

void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If the memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size, alignmentForPool, createInfoForPool, suballocType,
            allocationCount, pAllocations);
    }
    else
    {
        // Bit mask of memory types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size, alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer, dedicatedBufferUsage, dedicatedImage,
                createInfo, memTypeIndex, suballocType,
                allocationCount, pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed - try other compatible memory types.
            for(;;)
            {
                // Remove the old memTypeIndex from the list of possibilities.
                memoryTypeBits &= ~(1u << memTypeIndex);
                // Find an alternative memTypeIndex.
                res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                if(res == VK_SUCCESS)
                {
                    alignmentForMemType = VMA_MAX(
                        vkMemReq.alignment,
                        GetMemoryTypeMinAlignment(memTypeIndex));

                    res = AllocateMemoryOfType(
                        vkMemReq.size, alignmentForMemType,
                        requiresDedicatedAllocation || prefersDedicatedAllocation,
                        dedicatedBuffer, dedicatedBufferUsage, dedicatedImage,
                        createInfo, memTypeIndex, suballocType,
                        allocationCount, pAllocations);
                    // Allocation from this alternative memory type succeeded.
                    if(res == VK_SUCCESS)
                    {
                        return res;
                    }
                    // else: allocation from this memory type also failed - try the next one.
                }
                // No other matching memory type index could be found.
                else
                {
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
        else
        {
            return res;
        }
    }
}
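/*
Illustration of the public entry point that lands in AllocateMemory above:

    VkMemoryRequirements memReq;
    vkGetBufferMemoryRequirements(device, buffer, &memReq);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    VmaAllocation allocation;
    VmaAllocationInfo allocationInfo;
    VkResult res = vmaAllocateMemory(allocator, &memReq, &allocCreateInfo, &allocation, &allocationInfo);

If the first matching memory type runs out of memory, the fallback loop above
retries with the next compatible type before giving up.
*/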
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}
VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and so it does almost nothing. It is kept for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
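// GetBudget serves values cached from VK_EXT_memory_budget and only re-queries
// the driver (UpdateVulkanBudget) after 30 allocator operations. Usage is
// extrapolated as: usage reported at the last fetch + block bytes allocated by
// VMA since that fetch. Without the extension, usage falls back to VMA's own
// block bytes, and budget to 80% of the heap size.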
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // Have to take MIN with heap size because an explicit HeapSizeLimit is included in it.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics.
        }
    }
}
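// 4098 == 0x1002, the PCI vendor ID of AMD.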
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats, info.flags);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}
VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}

VkResult VmaAllocator_T::DefragmentationPassBegin(
    VmaDefragmentationPassInfo* pInfo,
    VmaDefragmentationContext context)
{
    return context->DefragmentPassBegin(pInfo);
}

VkResult VmaAllocator_T::DefragmentationPassEnd(
    VmaDefragmentationContext context)
{
    return context->DefragmentPassEnd();
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        /*
        Warning: This is a carefully designed algorithm.
        Do not modify unless you really know what you're doing :)
        */
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
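// GetAllocationInfo and TouchAllocation share the same lock-free pattern: a
// compare-exchange loop bumps the allocation's last-use frame index to the
// current frame, retrying if another thread updated it concurrently, and
// reports the allocation as lost when the index equals VMA_FRAME_INDEX_LOST.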
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    // Memory type index out of range or forbidden.
    if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() ||
        ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
        (void)success;
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}
void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData);
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
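// BindVulkanBuffer/BindVulkanImage use the plain vkBindBufferMemory /
// vkBindImageMemory entry points unless the caller passes a pNext chain, which
// requires the *2 variants from Vulkan 1.1 or VK_KHR_bind_memory2; if neither
// is available, binding with a pNext chain fails with
// VK_ERROR_EXTENSION_NOT_PRESENT.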
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}
void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
    return res;
}
VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
    return res;
}
VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    VkResult res = VK_SUCCESS;

    VkMappedMemoryRange memRange = {};
    if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
    {
        switch(op)
        {
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: The memory type is coherent - just ignore this call.
    return res;
}
VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets, const VkDeviceSize* sizes,
    VMA_CACHE_OPERATION op)
{
    typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
    typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
    RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));

    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation alloc = allocations[allocIndex];
        const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
        const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
        VkMappedMemoryRange newRange;
        if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
        {
            ranges.push_back(newRange);
        }
    }

    VkResult res = VK_SUCCESS;
    if(!ranges.empty())
    {
        switch(op)
        {
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
            break;
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: All affected memory types are coherent - just ignore this call.
    return res;
}
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
        (void)success;
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    // There is no need to call vkUnmapMemory here, because the Vulkan spec allows
    // skipping vkUnmapMemory before vkFreeMemory.

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create a dummy buffer.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // Query for supported memory types.
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // Destroy the dummy buffer.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}
uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
{
    // Make sure memory information is already fetched.
    VMA_ASSERT(GetMemoryTypeCount() > 0);

    uint32_t memoryTypeBits = UINT32_MAX;

    if(!m_UseAmdDeviceCoherentMemory)
    {
        // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD.
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
            {
                memoryTypeBits &= ~(1u << memTypeIndex);
            }
        }
    }

    return memoryTypeBits;
}
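// For non-coherent memory types, flush/invalidate ranges must be aligned to
// VkPhysicalDeviceLimits::nonCoherentAtomSize. Worked example (illustrative):
// with an atom size of 64, a request at offset 70 with size 20 becomes
// offset 64 (aligned down) and size VmaAlignUp(20 + (70 - 64), 64) = 64,
// clamped so it never extends past the allocation or the owning block.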
bool VmaAllocator_T::GetFlushOrInvalidateRange(
    VmaAllocation allocation,
    VkDeviceSize offset, VkDeviceSize size,
    VkMappedMemoryRange& outRange) const
{
    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        const VkDeviceSize allocationSize = allocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
        outRange.pNext = VMA_NULL;
        outRange.memory = allocation->GetMemory();

        switch(allocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                outRange.size = allocationSize - outRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                outRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
                    allocationSize - outRange.offset);
            }
            break;
        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
            {
                // 1. Still within this allocation.
                outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
                if(size == VK_WHOLE_SIZE)
                {
                    size = allocationSize - offset;
                }
                else
                {
                    VMA_ASSERT(offset + size <= allocationSize);
                }
                outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);

                // 2. Adjust to whole block.
                const VkDeviceSize allocationOffset = allocation->GetOffset();
                VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
                const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
                outRange.offset += allocationOffset;
                outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
            }
            break;
        default:
            VMA_ASSERT(0);
        }
        return true;
    }
    return false;
}
#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    VmaPnextChainPushFront(&memProps, &budgetProps);

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();

            // Some buggy drivers return a budget that is incorrect, e.g. zero or much larger than the heap size.
            if(m_Budget.m_VulkanBudget[heapIndex] == 0)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics.
            }
            else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
            }
            if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
            {
                m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
            }
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    // Lazily calculated and cached; UINT32_MAX serves as the "not yet computed" sentinel.
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
////////////////////////////////////////////////////////////////////////////////
// Public interface

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_ASSERT(pCreateInfo->vulkanApiVersion == 0 ||
        (VK_VERSION_MAJOR(pCreateInfo->vulkanApiVersion) == 1 && VK_VERSION_MINOR(pCreateInfo->vulkanApiVersion) <= 2));
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo)
{
    VMA_ASSERT(allocator && pAllocatorInfo);
    pAllocatorInfo->instance = allocator->m_hInstance;
    pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice();
    pAllocatorInfo->device = allocator->m_hDevice;
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}
VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
                    {
                        json.WriteString("PROTECTED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_UNCACHED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
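// vmaFindMemoryTypeIndex scores every memory type allowed by memoryTypeBits:
// a type is rejected unless it contains all requiredFlags; among the rest,
// cost = number of preferredFlags bits missing + number of notPreferredFlags
// bits present. The first type with cost 0 wins immediately; otherwise the
// lowest-cost type is returned.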
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    // Avoid DEVICE_COHERENT memory unless it was explicitly requested.
    if(((pAllocationCreateInfo->requiredFlags | pAllocationCreateInfo->preferredFlags) &
        (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
    {
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to the memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains all requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Calculate the cost as the number of bits from preferredFlags not present in this memory type.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember the memory type with the lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
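// Example (illustrative): finding a memory type for a host-visible staging
// allocation, accepting any type the implementation supports:
//
//   VmaAllocationCreateInfo allocCreateInfo = {};
//   allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
//   uint32_t memTypeIndex;
//   VkResult res = vmaFindMemoryTypeIndex(
//       allocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);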
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool && ppName);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}
VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}
VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(
    VmaAllocator allocator,
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets,
    const VkDeviceSize* sizes)
{
    VMA_ASSERT(allocator);

    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocations);

    VMA_DEBUG_LOG("vmaFlushAllocations");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        // TODO: Recording is not implemented for this function.
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(
    VmaAllocator allocator,
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets,
    const VkDeviceSize* sizes)
{
    VMA_ASSERT(allocator);

    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocations);

    VMA_DEBUG_LOG("vmaInvalidateAllocations");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        // TODO: Recording is not implemented for this function.
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using the new one.

    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // info2.flags, maxGpuAllocationsToMove, maxGpuBytesToMove, and commandBuffer are deliberately left zero.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    // Degenerate case: Nothing to defragment.
    if(pInfo->allocationCount == 0 && pInfo->poolCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(pInfo->allocationCount == 0 || pInfo->pAllocations != VMA_NULL);
    VMA_ASSERT(pInfo->poolCount == 0 || pInfo->pPools != VMA_NULL);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context,
    VmaDefragmentationPassInfo* pInfo)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(pInfo);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->moveCount, pInfo->pMoves));

    VMA_DEBUG_LOG("vmaBeginDefragmentationPass");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        pInfo->moveCount = 0;
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassBegin(pInfo, context);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaEndDefragmentationPass");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassEnd(context);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
        !allocator->m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            pBufferCreateInfo->usage, // dedicatedBufferUsage
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
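// Example (illustrative): creating a device-local vertex buffer together with
// its memory in one call:
//
//   VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
//   bufCreateInfo.size = 65536;
//   bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
//       VK_BUFFER_USAGE_TRANSFER_DST_BIT;
//
//   VmaAllocationCreateInfo allocCreateInfo = {};
//   allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//
//   VkBuffer buf;
//   VmaAllocation alloc;
//   VkResult res = vmaCreateBuffer(
//       allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, VMA_NULL);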
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            UINT32_MAX, // dedicatedBufferUsage
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
#endif // #ifdef VMA_IMPLEMENTATION