#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H
#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif
#if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
    extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
    extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
    extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    extern PFN_vkAllocateMemory vkAllocateMemory;
    extern PFN_vkFreeMemory vkFreeMemory;
    extern PFN_vkMapMemory vkMapMemory;
    extern PFN_vkUnmapMemory vkUnmapMemory;
    extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
    extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
    extern PFN_vkBindBufferMemory vkBindBufferMemory;
    extern PFN_vkBindImageMemory vkBindImageMemory;
    extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    extern PFN_vkCreateBuffer vkCreateBuffer;
    extern PFN_vkDestroyBuffer vkDestroyBuffer;
    extern PFN_vkCreateImage vkCreateImage;
    extern PFN_vkDestroyImage vkDestroyImage;
    extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
    #if VMA_VULKAN_VERSION >= 1001000
        extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
        extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
        extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
        extern PFN_vkBindImageMemory2 vkBindImageMemory2;
        extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
    #endif // #if VMA_VULKAN_VERSION >= 1001000
#endif // #if defined(__ANDROID__) && VMA_STATIC_VULKAN_FUNCTIONS && VK_NO_PROTOTYPES
#include <vulkan/vulkan.h>
#if VMA_RECORDING_ENABLED
    #if defined(_WIN32)
        #include <windows.h>
    #else
        #error VMA Recording functionality is not yet available for non-Windows platforms
    #endif
#endif
#if !defined(VMA_VULKAN_VERSION)
    #if defined(VK_VERSION_1_2)
        #define VMA_VULKAN_VERSION 1002000
    #elif defined(VK_VERSION_1_1)
        #define VMA_VULKAN_VERSION 1001000
    #else
        #define VMA_VULKAN_VERSION 1000000
    #endif
#endif
#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif
#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif
#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif
#if !defined(VMA_BUFFER_DEVICE_ADDRESS)
    #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
        #define VMA_BUFFER_DEVICE_ADDRESS 1
    #else
        #define VMA_BUFFER_DEVICE_ADDRESS 0
    #endif
#endif
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif
#ifndef VMA_LEN_IF_NOT_NULL
    #define VMA_LEN_IF_NOT_NULL(len)
#endif
#ifndef VMA_NULLABLE
    #ifdef __clang__
        #define VMA_NULLABLE _Nullable
    #else
        #define VMA_NULLABLE
    #endif
#endif

#ifndef VMA_NOT_NULL
    #ifdef __clang__
        #define VMA_NOT_NULL _Nonnull
    #else
        #define VMA_NOT_NULL
    #endif
#endif
#ifndef VMA_NOT_NULL_NON_DISPATCHABLE
    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
        #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
    #else
        #define VMA_NOT_NULL_NON_DISPATCHABLE
    #endif
#endif

#ifndef VMA_NULLABLE_NON_DISPATCHABLE
    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
        #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
    #else
        #define VMA_NULLABLE_NON_DISPATCHABLE
    #endif
#endif
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR* PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryType,
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
    VkDeviceSize size,
    void* VMA_NULLABLE pUserData);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR* PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryType,
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
    VkDeviceSize size,
    void* VMA_NULLABLE pUserData);
// Fragment of struct VmaVulkanFunctions (pointers to extension functions):
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
    // Member of VmaAllocatorCreateInfo:
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pHeapSizeLimit;
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t frameIndex);
#ifndef VMA_STATS_STRING_ENABLED
#define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator VMA_NOT_NULL allocator,
    char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator VMA_NOT_NULL allocator,
    char* VMA_NULLABLE pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator VMA_NOT_NULL allocator,
    const VmaPoolCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    size_t* VMA_NULLABLE pLostAllocationCount);

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    const char* VMA_NULLABLE * VMA_NOT_NULL ppName);

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    const char* VMA_NULLABLE pName);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
    const VmaAllocationCreateInfo* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pCreateInfo,
    size_t allocationCount,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
    VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator VMA_NOT_NULL allocator,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator VMA_NOT_NULL allocator,
    size_t allocationCount,
    const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize newSize);
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    void* VMA_NULLABLE pUserData);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    void* VMA_NULLABLE * VMA_NOT_NULL ppData);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize offset,
    VkDeviceSize size);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize offset,
    VkDeviceSize size);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t allocationCount,
    const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t allocationCount,
    const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
    // Member of VmaDefragmentationInfo2:
    const VmaPool VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(poolCount) pPools;

    // Member of VmaDefragmentationPassMoveInfo:
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory;
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator VMA_NOT_NULL allocator,
    const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
    size_t allocationCount,
    VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
    const VmaDefragmentationInfo* VMA_NULLABLE pDefragmentationInfo,
    VmaDefragmentationStats* VMA_NULLABLE pDefragmentationStats);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
    const void* VMA_NULLABLE pNext);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
    const void* VMA_NULLABLE pNext);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
    VmaAllocation VMA_NULLABLE allocation);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator VMA_NOT_NULL allocator,
    VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
    VmaAllocation VMA_NULLABLE allocation);
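/*
Illustrative sketch (editorial, not part of the library): the canonical
create/destroy pair. The allocator handle, sizes and usage flags below are
hypothetical.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buf;
    VmaAllocation alloc;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, VMA_NULL);
    // ... use the buffer ...
    vmaDestroyBuffer(allocator, buf, alloc); // Frees both the buffer and its memory.
*/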
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdlib>
#include <cstring>
#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif
#if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
    #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
#endif
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif
#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus
    // Otherwise it is always 199711L, despite shared_mutex working since Visual Studio 2015 Update 2.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif
#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <cassert> // for assert
#include <algorithm> // for min, max
#include <mutex>
#ifndef VMA_NULL
    // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    #define VMA_NULL   nullptr
#endif
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef NDEBUG
        #define VMA_ASSERT(expr)
    #else
        #define VMA_ASSERT(expr)         assert(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef NDEBUG
        #define VMA_HEAVY_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
    #endif
#endif
#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type)       (__alignof(type))
#endif
#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (aligned_alloc((alignment), (size) ))
    #endif
#endif
#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr)   _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr)   free(ptr)
    #endif
#endif
#ifndef VMA_MIN
    #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)
#endif
#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif
// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
        bool TryLock() { return m_Mutex.try_lock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif
// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            bool TryLockRead() { return m_Mutex.try_lock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
            bool TryLockWrite() { return m_Mutex.try_lock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        // Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
            bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            bool TryLockRead() { return m_Mutex.TryLock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
            bool TryLockWrite() { return m_Mutex.TryLock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif
#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    /// Every allocation will have its own memory block. Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    /// Minimum alignment of all allocations, in bytes. Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    /// Minimum margin before and after every allocation, in bytes. Set nonzero for debugging purposes only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    /// Define this macro to 1 to automatically fill new allocations and destroyed allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    /// Define this macro to 1 together with non-zero VMA_DEBUG_MARGIN to write magic values to the margins and validate them.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    /// Set this to 1 for debugging purposes only, to enable a single mutex protecting all entry calls to the library.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    /// Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity. Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif
#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif
#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif
static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

// Copies of some Vulkan definitions, so their existence doesn't have to be checked just to handle a few constants.
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
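// Note (editorial): this is the classic SWAR popcount. Pairs of bits are summed,
// then nibbles, bytes and half-words. For example, for v = 0b1011 the first step
// yields c = 0b1011 - 0b0101 = 0b0110, i.e. the two 2-bit fields hold counts 1 and 2,
// and the remaining folds combine them into the final result 3.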
// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}
// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
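// Note (editorial): worked examples of the helpers above, assuming unsigned T:
// VmaAlignUp<uint32_t>(13, 8)   = (13 + 7) / 8 * 8 = 16
// VmaAlignDown<uint32_t>(13, 8) = 13 / 8 * 8       = 8
// VmaRoundDiv<uint32_t>(10, 4)  = (10 + 2) / 4     = 3   (10/4 = 2.5 rounds up to 3)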
// Returns true if given number is a power of two.
// T must be unsigned integer number or signed integer but always nonnegative.
// For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
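// Note (editorial): both helpers smear the highest set bit into all lower
// positions. For example, for v = 300 (0b100101100):
// VmaNextPow2(300) = 512 and VmaPrevPow2(300) = 256.
// VmaNextPow2 decrements v first so that exact powers of two map to themselves.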
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}
#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
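/*
Illustrative sketch (editorial, not part of the library): a Lomuto-style
quicksort using the last element as pivot, exposed through the VMA_SORT macro.
Hypothetical usage:

    VkDeviceSize offsets[] = { 64, 16, 32 };
    VMA_SORT(offsets, offsets + 3, [](VkDeviceSize a, VkDeviceSize b) { return a < b; });
    // offsets == { 16, 32, 64 }
*/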
/*
Returns true if two memory blocks occupy the same VkDeviceMemory page.
Assumption: resourceA must be in less memory offset than resourceB.

Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
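// Note (editorial): worked example. With pageSize = 65536, a resource A occupying
// [0, 1000) ends on page 0; a resource B starting at offset 70000 starts on page 1,
// so VmaBlocksOnSamePage(0, 1000, 70000, 65536) returns false. With
// resourceBOffset = 2000 both land on page 0 and it returns true.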
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. If the type is unknown, the
function behaves conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
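// Note (editorial): the check above is conservative by design - an UNKNOWN
// suballocation type conflicts with everything except FREE. For example,
// BUFFER next to IMAGE_OPTIMAL conflicts (true), while BUFFER next to
// IMAGE_LINEAR does not, matching the "Buffer-Image Granularity" rules cited above.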
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#else
    // no-op
#endif
}
static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the collection of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection or the place
where a new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
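/*
Illustrative sketch (editorial, not part of the library): the function above has
std::lower_bound semantics. On a hypothetical sorted array:

    const int arr[] = { 1, 3, 3, 7 };
    struct Less { bool operator()(int a, int b) const { return a < b; } };
    const int* p = VmaBinaryFindFirstNotLess(arr, arr + 4, 3, Less());
    // p points to arr[1] - the first element not less than the key, which is
    // also the insertion point that keeps the array sorted.
*/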
template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
/*
Returns true if all pointers in the array are not-null and unique.
Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
T must be pointer type, e.g. VmaAllocation, VmaPool.
*/
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
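/*
Illustrative sketch (editorial, not part of the library): pushing a struct to
the front of a Vulkan pNext chain. The types are standard Vulkan; the variables
are hypothetical.

    VkMemoryRequirements2 memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 };
    VkMemoryDedicatedRequirements dedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS };
    VmaPnextChainPushFront(&memReq2, &dedicatedReq);
    // memReq2.pNext == &dedicatedReq; dedicatedReq.pNext keeps whatever memReq2.pNext held before.
*/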
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}
static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}
static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR
/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
    // value is unused.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }
    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }
    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }
    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }
    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }
    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
template<typename T, typename AllocatorT, size_t N>
class VmaSmallVector
{
public:
    typedef T value_type;

    VmaSmallVector(const AllocatorT& allocator) :
        m_Count(0),
        m_DynamicArray(allocator)
    {
    }
    VmaSmallVector(size_t count, const AllocatorT& allocator) :
        m_Count(count),
        m_DynamicArray(count > N ? count : 0, allocator)
    {
    }
    template<typename SrcT, typename SrcAllocatorT, size_t SrcN>
    VmaSmallVector(const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) = delete;
    template<typename SrcT, typename SrcAllocatorT, size_t SrcN>
    VmaSmallVector<T, AllocatorT, N>& operator=(const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) = delete;
    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
    const T* data() const { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return data()[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return data()[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[m_Count - 1];
    }
    void resize(size_t newCount, bool freeMemory = false)
    {
        if(newCount > N && m_Count > N)
        {
            // Any direction, staying in m_DynamicArray.
            m_DynamicArray.resize(newCount, freeMemory);
        }
        else if(newCount > N && m_Count <= N)
        {
            // Growing, moving from m_StaticArray to m_DynamicArray.
            m_DynamicArray.resize(newCount, freeMemory);
            if(m_Count > 0)
            {
                memcpy(m_DynamicArray.data(), m_StaticArray, m_Count * sizeof(T));
            }
        }
        else if(newCount <= N && m_Count > N)
        {
            // Shrinking, moving from m_DynamicArray to m_StaticArray.
            if(newCount > 0)
            {
                memcpy(m_StaticArray, m_DynamicArray.data(), newCount * sizeof(T));
            }
            m_DynamicArray.resize(0, freeMemory);
        }
        else
        {
            // Any direction, staying in m_StaticArray - nothing to do here.
        }
        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        m_DynamicArray.clear(freeMemory);
        m_Count = 0;
    }
    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        T* const dataPtr = data();
        if(index < oldCount)
        {
            memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) * sizeof(T));
        }
        dataPtr[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            T* const dataPtr = data();
            memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }
    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        data()[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return data(); }
    iterator end() { return data() + m_Count; }

private:
    size_t m_Count;
    T m_StaticArray[N]; // Used when m_Count <= N.
    VmaVector<T, AllocatorT> m_DynamicArray; // Used when m_Count > N.
};
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    template<typename... Types> T* Alloc(Types... args);
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}
template<typename T>
template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
            return result;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
    return result;
}
template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            ptr->~T(); // Explicit destructor call.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
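// Note (editorial): each block keeps its free slots as an intrusive singly-linked
// free list threaded through the Item union - a free slot stores the index of the
// next free slot in NextFreeIndex, with UINT32_MAX terminating the list. Alloc and
// Free are O(1) pops/pushes on that list; Free additionally pays an O(number of
// blocks) range check to find which block owns the pointer.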
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;
    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);
private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};

template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}
template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}
template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}
template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}
template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }
        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }
        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };
    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }
        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }
        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };
    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};
#endif // #if VMA_USE_STL_LIST

#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}
template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };
    /*
    This struct is allocated using VmaPoolAllocator.
    */

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment{1},
        m_Size{0},
        m_pUserData{VMA_NULL},
        m_LastUseFrameIndex{currentFrameIndex},
        m_MemoryTypeIndex{0},
        m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
        m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
        m_MapCount{0},
        m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
    {
#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }
    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }
    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);
    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }
    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);
#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif
private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif
};
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as
allocated memory block or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparators for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

/*
Parameters of planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
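// Note (editorial): worked example of the cost heuristic. With
// VMA_LOST_ALLOCATION_COST = 1 MiB, a request overlapping two lost-able
// allocations summing to 204800 bytes costs 204800 + 2 * 1048576 = 2301952,
// so candidates that evict fewer allocations are strongly preferred even if
// they waste more free space.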
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    // Shouldn't modify blockCount.
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;
#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags.
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request must be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;
protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};
#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);
    // For defragmentation:
    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        // Suballocations in 2nd vector are created later than the ones in 1st,
        // but they all have smaller offset.
        SECOND_VECTOR_RING_BUFFER,
        // Suballocations in 2nd vector are upper side of double stack.
        // They all have offsets higher than those in 1st vector.
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
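/*
How the paired vectors above work: m_Suballocations0 and m_Suballocations1 swap
the roles of "1st" and "2nd" vector by flipping m_1stVectorIndex, so metadata
can be rebuilt into the currently unused vector and promoted in O(1), without
copying elements back. A standalone sketch of the same role-swap trick (all
names here are hypothetical, not part of the library):
*/
#if 0
#include <vector>
struct TwoVectorsSketch
{
    std::vector<int> v0, v1;
    unsigned firstIndex = 0; // 0 => v0 plays "1st", 1 => v1 plays "1st"

    std::vector<int>& First()  { return firstIndex ? v1 : v0; }
    std::vector<int>& Second() { return firstIndex ? v0 : v1; }

    // Rebuild "1st" without the dead items (0 marks dead here), then swap roles.
    void CompactFirst()
    {
        Second().clear();
        for(int x : First())
        {
            if(x != 0)
                Second().push_back(x);
        }
        First().clear();
        firstIndex ^= 1; // the compacted vector becomes the new "1st"
    }
};
#endif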
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        // (type, parent/buddy links, and per-type data follow in the full source)
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    // Doubly linked list of free nodes of given level.
    struct
    {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx,
        const Node* parent,
        const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo,
        const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level. node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level. node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json,
        const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
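/*
Worked example of the level math used by VmaBlockMetadata_Buddy: with
m_UsableSize = 1024 and LevelToNodeSize(level) = m_UsableSize >> level,
level 0 covers 1024 bytes, level 1 covers 512, level 2 covers 256, and so on.
An allocation of 200 bytes therefore lands on level 2 (256-byte nodes), the
deepest level whose node size still fits the request. A standalone sketch of
that search (names hypothetical, not the library's implementation):
*/
#if 0
#include <stdint.h>
static uint32_t AllocSizeToLevelExample(uint64_t usableSize, uint32_t levelCount, uint64_t allocSize)
{
    uint32_t level = 0;
    uint64_t nextLevelNodeSize = usableSize >> 1;
    // Descend while the next (smaller) node size still fits the allocation.
    while(allocSize <= nextLevelNodeSize && level + 1 < levelCount)
    {
        ++level;
        nextLevelNodeSize >>= 1;
    }
    return level; // e.g. usableSize=1024, allocSize=200 => level 2 (node size 256)
}
#endif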
/*
Represents a single block of device memory (`VkDeviceMemory`) with all the
data about its regions (aka suballocations, #VmaAllocation), assigned and free.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if does not belong to custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    // Protects access to m_hMemory so it is not used by multiple threads
    // simultaneously, e.g. vkMapMemory, vkBindBufferMemory.
    // Also protects m_MapCount and m_pMappedData.
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
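/*
m_MapCount above is a reference counter: Map() increases it and performs the
actual vkMapMemory only on the 0 -> nonzero transition, while Unmap() decreases
it and calls vkUnmapMemory only when it drops back to 0, so nested Map/Unmap
pairs on the same block stay cheap. A minimal sketch of that counting pattern
under hypothetical names (PlatformMap/PlatformUnmap stand in for the Vulkan
calls, which the real code performs under m_Mutex):
*/
#if 0
static void* PlatformMap() { static char storage[64]; return storage; } // stub
static void PlatformUnmap() {} // stub

struct MappedBlockSketch
{
    unsigned mapCount = 0;
    void* pMappedData = nullptr;

    void* Map()
    {
        if(mapCount++ == 0)
            pMappedData = PlatformMap(); // first user actually maps
        return pMappedData;
    }
    void Unmap()
    {
        if(mapCount > 0 && --mapCount == 0)
        {
            PlatformUnmap(); // last user actually unmaps
            pMappedData = nullptr;
        }
    }
};
#endif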
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaDeviceMemoryBlock* pSrcBlock;
    VmaDeviceMemoryBlock* pDstBlock;
};
class VmaDefragmentationAlgorithm;

/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a
specific Vulkan memory type.
*/
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    // Saves results in pCtx->res.
    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        uint32_t flags,
        VmaDefragmentationStats* pStats);

    uint32_t ProcessDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves);

    void CommitDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationStats* pStats);

    ////////////////////////////////////////////////////////////////////////////////
    // To be used only while the m_Mutex is locked. Used during defragmentation.

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    // There can be at most one allocation that is completely empty -
    // a hysteresis to avoid alternating creation and destruction of VkDeviceMemory.
    bool m_HasEmptyBlock;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    void UpdateHasEmptyBlock();
};
struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo,
        VkDeviceSize preferredBlockSize);
    ~VmaPool_T();

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
#endif

private:
    uint32_t m_Id;
    char* m_Name;
};
/*
Performs defragmentation: computes new offsets for allocations in a block
vector, without moving the actual data.
*/
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        bool freeOldAllocations);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    // Leave this structure for remaining empty space.
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    // This structure becomes invalid.
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }

                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
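/*
Worked example of the FreeSpaceDatabase above, assuming the default 16-byte
registration threshold (VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER).
Hypothetical driver code, for illustration only:
*/
#if 0
FreeSpaceDatabase db;
db.Register(/*blockInfoIndex*/ 0, /*offset*/ 0, /*size*/ 1000);

size_t blockIndex;
VkDeviceSize dstOffset;
db.Fetch(/*alignment*/ 256, /*size*/ 300, blockIndex, dstOffset);
// => blockIndex = 0, dstOffset = 0; the slot shrinks to offset 300, size 700.
db.Fetch(256, 300, blockIndex, dstOffset);
// => dstOffset = 512 (300 aligned up to 256); consumed (512-300)+300 = 512 bytes,
//    so the slot shrinks to offset 812, size 188 (still above the threshold).
#endif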
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    uint32_t flags;
    VkBuffer hBuffer;
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VkResult res;
    bool mutexLocked;
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
    uint32_t defragmentationMovesProcessed;
    uint32_t defragmentationMovesCommitted;
    bool hasDefragmentationPlan;

    VmaBlockVectorDefragmentationContext(
        VmaAllocator hAllocator,
        VmaPool hCustomPool, // Optional.
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

private:
    const VmaAllocator m_hAllocator;
    // Null if not from custom pool.
    const VmaPool m_hCustomPool;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        VmaAllocator hAllocator,
        uint32_t currFrameIndex,
        uint32_t flags,
        VmaDefragmentationStats* pStats);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, const VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        const VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags);

    VkResult DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo);
    VkResult DefragmentPassEnd();

private:
    const VmaAllocator m_hAllocator;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    VmaDefragmentationStats* const m_pStats;

    VkDeviceSize m_MaxCpuBytesToMove;
    uint32_t m_MaxCpuAllocationsToMove;
    VkDeviceSize m_MaxGpuBytesToMove;
    uint32_t m_MaxGpuAllocationsToMove;

    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    VmaRecorder();
    VkResult Init(const VmaRecordSettings& settings, bool useMutex);
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled,
        bool deviceCoherentMemoryExtensionEnabled);
    ~VmaRecorder();

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationContext ctx);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        VmaDefragmentationContext ctx);
    void RecordSetPoolName(uint32_t frameIndex,
        VmaPool pool,
        const char* name);

private:
    struct CallParams
    {
        uint32_t threadId;
        double time;
    };

    class UserDataString
    {
    public:
        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
        const char* GetString() const { return m_Str; }

    private:
        char m_PtrStr[17];
        const char* m_Str;
    };

    bool m_UseMutex;
    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
};

#endif // #if VMA_RECORDING_ENABLED
// Thread-safe wrapper over a pool allocator, used to allocate VmaAllocation_T objects.
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    template<typename... Types> VmaAllocation Allocate(Types... args);

private:
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
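/*
Sketch of how VmaCurrentBudgetData is driven by allocation paths: on success
the allocator adds the allocation size to the heap's counters, on free it
removes it; with VMA_MEMORY_BUDGET enabled, every operation also bumps
m_OperationsSinceBudgetFetch so the allocator knows when its cached
VK_EXT_memory_budget numbers are stale. Hypothetical usage, not a verbatim
excerpt from the allocator:
*/
#if 0
VmaCurrentBudgetData budget;
const uint32_t heapIndex = 0;
budget.AddAllocation(heapIndex, 64ull * 1024);    // allocation succeeded: +64 KiB
budget.RemoveAllocation(heapIndex, 64ull * 1024); // allocation freed: -64 KiB
// m_AllocationBytes[0] is back to 0; with VMA_MEMORY_BUDGET enabled,
// m_OperationsSinceBudgetFetch has advanced by 2.
#endif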
// Main allocator object.
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation;
    bool m_UseKhrBindMemory2;
    bool m_UseExtMemoryBudget;
    bool m_UseAmdDeviceCoherentMemory;
    bool m_UseKhrBufferDeviceAddress;
    VkDevice m_hDevice;
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap.
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }
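    /*
    Why GetMemoryTypeMinAlignment above special-cases non-coherent memory:
    flushes and invalidations must cover ranges aligned to nonCoherentAtomSize,
    so allocations in HOST_VISIBLE-but-not-HOST_COHERENT types are aligned to
    that atom up front. Example arithmetic, assuming nonCoherentAtomSize = 64:
    a write at offset 100 with size 40 must be flushed as the enclosing range
    [64, 192) - offset rounded down, end rounded up, to whole atoms.
    Illustrative sketch (hypothetical helper, not the library's function):
    */
#if 0
#include <stdint.h>
static void FlushRangeExample(uint64_t atom /*e.g. 64*/, uint64_t offset, uint64_t size,
    uint64_t* outOffset, uint64_t* outSize)
{
    const uint64_t begin = offset / atom * atom;                   // round down: 100 -> 64
    const uint64_t end = (offset + size + atom - 1) / atom * atom; // round up: 140 -> 192
    *outOffset = begin;
    *outSize = end - begin; // 128 bytes, i.e. two whole atoms
}
#endif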
    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

    uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(
        VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationStats* pStats,
        VmaDefragmentationContext* pContext);
    VkResult DefragmentationEnd(
        VmaDefragmentationContext context);

    VkResult DefragmentationPassBegin(
        VmaDefragmentationPassInfo* pInfo,
        VmaDefragmentationContext context);
    VkResult DefragmentationPassEnd(
        VmaDefragmentationContext context);

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR.
    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkBuffer buffer,
        const void* pNext);
    // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR.
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkImage image,
        const void* pNext);

    VkResult BindBufferMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

    VkResult FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);
    VkResult FlushOrInvalidateAllocations(
        uint32_t allocationCount,
        const VmaAllocation* allocations,
        const VkDeviceSize* offsets, const VkDeviceSize* sizes,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    // Returns bit mask of memory types that can support defragmentation on GPU,
    // as they support creation of the required buffer for copy operations.
    uint32_t GetGpuDefragmentationMemoryTypeBits();

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

    // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types.
    uint32_t m_GlobalMemoryTypeBits;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    void ImportVulkanFunctions_Static();
#endif

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    void ImportVulkanFunctions_Dynamic();
#endif

    void ValidateVulkanFunctions();

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool withinBudget,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

    uint32_t CalculateGlobalMemoryTypeBits() const;

    bool GetFlushOrInvalidateRange(
        VmaAllocation allocation,
        VkDeviceSize offset, VkDeviceSize size,
        VkMappedMemoryRange& outRange) const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char *p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char *p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
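/*
Putting VmaStringBuilder and VmaJsonWriter together: the writer enforces the
key/value discipline with the valueCount parity check in BeginValue (an even
count inside an object means a key is expected, hence the VMA_ASSERT(isString)).
A hypothetical usage sketch (hAllocator: any valid VmaAllocator) and the output
it would produce:
*/
#if 0
VmaStringBuilder sb(hAllocator);
VmaJsonWriter json(hAllocator->GetAllocationCallbacks(), sb);
json.BeginObject();
json.WriteString("Size");   // key (a string is required here)
json.WriteNumber(256u);     // value
json.WriteString("Mapped");
json.WriteBool(true);
json.EndObject();
// sb now holds: {\n  "Size": 256,\n  "Mapped": true\n}
#endif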
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
            ++mapRefCount;
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
        break;
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you are doing :)
    */
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting the atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                return true;
            }
        }
    }
}
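/*
MakeLost above relies on a classic compare-exchange retry loop: read the
last-use frame index, decide, then attempt to publish VMA_FRAME_INDEX_LOST;
if another thread touched the allocation in the meantime, the exchange fails
and the loop re-evaluates with the updated value. The same pattern in
miniature with std::atomic (illustrative only; unlike the real code, this
sketch simply returns false in the already-lost case instead of asserting):
*/
#if 0
#include <atomic>
#include <cstdint>
static bool TryMarkLost(std::atomic<uint32_t>& lastUseFrame,
    uint32_t currentFrame, uint32_t framesInUse, uint32_t LOST = UINT32_MAX)
{
    uint32_t observed = lastUseFrame.load();
    for(;;)
    {
        if(observed == LOST)
            return false;                            // already lost
        if(observed + framesInUse >= currentFrame)
            return false;                            // still potentially in use
        if(lastUseFrame.compare_exchange_weak(observed, LOST))
            return true;                             // we won the race
        // observed was refreshed by compare_exchange_weak; retry.
    }
}
#endif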
#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}

void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
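/*
The second operator() above lets binary search compare a list iterator directly
against a plain VkDeviceSize key, so no temporary suballocation is needed. This
is what VmaBlockMetadata_Generic::CreateAllocationRequest further below exploits
via VmaBinaryFindFirstNotLess. An equivalent standalone sketch (hypothetical
types) using std::lower_bound over a size-sorted vector:
*/
#if 0
#include <algorithm>
#include <vector>
struct Item { unsigned long long size; };
struct ItemSizeLess
{
    bool operator()(const Item* lhs, const Item* rhs) const { return lhs->size < rhs->size; }
    bool operator()(const Item* lhs, unsigned long long rhsSize) const { return lhs->size < rhsSize; }
};
static const Item* FindFirstNotLess(const std::vector<const Item*>& sorted, unsigned long long key)
{
    // Uses the (Item*, key) overload - no temporary Item has to be built.
    auto it = std::lower_bound(sorted.begin(), sorted.end(), key, ItemSizeLess());
    return it != sorted.end() ? *it : nullptr;
}
#endif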
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata

VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Generic

VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}

bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations registered in m_FreeSuballocationsBySize.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Margin required between allocations - every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // Margin required between allocations - previous allocation must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize
    // doesn't match expected one.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}
VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}

void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;

    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request: early return.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount,
                    bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex, frameInUseCount,
                    bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    it,
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search staring from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount,
                    bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.
        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount,
                    bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
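/*
Summary of the search order implemented above, for a block whose
m_FreeSuballocationsBySize array is sorted by size ascending:
- BEST_FIT: binary-search for the first hole of sufficient size
  (VmaBinaryFindFirstNotLess), then scan upward until CheckAllocation accepts one.
- MIN_OFFSET (internal strategy): walk the suballocation list from the start and
  take the first free hole that fits, yielding the lowest possible offset.
- WORST_FIT / FIRST_FIT: try holes from the largest downward.
A tiny standalone model of that last fallback (CheckAllocation can reject a hole
for alignment or granularity reasons, hence the loop; illustrative only):
*/
#if 0
#include <stddef.h>
static size_t ScanFromLargest(const unsigned long long* sortedSizes, size_t count,
    unsigned long long needed)
{
    for(size_t index = count; index--; ) // same reverse-iteration idiom as above
    {
        if(sortedSizes[index] >= needed) // real code also runs CheckAllocation here
            return index;
    }
    return (size_t)-1; // no hole fits
}
#endif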
9167 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
9168 uint32_t currentFrameIndex,
9169 uint32_t frameInUseCount,
9170 VmaAllocationRequest* pAllocationRequest)
9172 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
9174 while(pAllocationRequest->itemsToMakeLostCount > 0)
9176 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
9178 ++pAllocationRequest->item;
9180 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9181 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
9182 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
9183 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9185 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
9186 --pAllocationRequest->itemsToMakeLostCount;
9194 VMA_HEAVY_ASSERT(Validate());
9195 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9196 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
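/*
Illustration (informal, not part of the library): how the aging rule used by
MakeLost() plays out. An allocation is eligible to become lost only when

    lastUseFrameIndex + frameInUseCount < currentFrameIndex

e.g. with frameInUseCount == 2 and an allocation last used in frame 10:

    frame 11:  10 + 2 < 11  -> false, frames 11 and 12 may still be in flight
    frame 12:  10 + 2 < 12  -> false
    frame 13:  10 + 2 < 13  -> true, MakeLost() may succeed
*/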
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
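/*
Layout sketch (illustrative, assuming VMA_DEBUG_MARGIN == 16 and
VMA_DEBUG_DETECT_CORRUPTION == 1, which fills the margins with
VMA_CORRUPTION_DETECTION_MAGIC_VALUE):

    | ... | 16 B margin | allocation (it->offset, it->size) | 16 B margin | ... |
            ^ magic checked at it->offset - VMA_DEBUG_MARGIN
                                            magic checked at it->offset + it->size ^
*/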
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert new free suballocation after current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
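/*
Worked example (hypothetical numbers): allocating 256 B with request.offset == 128
from a free suballocation [offset = 96, size = 1000]:

    before:  [FREE 96..1096)
    after:   [FREE 96..128) [ALLOC 128..384) [FREE 384..1096)

paddingBegin == 32 and paddingEnd == 712 each become a separate free
suballocation, so m_FreeCount goes -1 +2 and m_SumFreeSize drops by exactly 256.
*/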
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that we have final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. *pOffset is already filled.
    return true;
}
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}
VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
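/*
Illustration of the merge cases handled above (F = free, A = allocated,
X = the suballocation just freed):

    A X A  ->  A F A    (no merge)
    A X F  ->  A F      (mergeWithNext)
    F X A  ->  F A      (mergeWithPrev; prevItem is returned)
    F X F  ->  F        (both merges collapse three items into one)
*/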
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    // You may want to enable this validation at the beginning or at the end of
    // this function, depending on what do you want to check.
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }

    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}
void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    // You may want to enable this validation at the beginning or at the end of
    // this function, depending on what do you want to check.
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }

    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty())
    {
        return false;
    }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it)
    {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
        {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
            {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}
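/*
Background (illustrative): bufferImageGranularity is the device limit that forbids
a linear resource (e.g. a buffer) and a non-linear resource (e.g. an OPTIMAL-tiling
image) from sharing the same "page" of a VkDeviceMemory block. For example, with a
granularity of 1024, a buffer ending at offset 512 and an optimal image starting at
offset 768 would both touch page [0..1024) and therefore conflict.
*/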
VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_SumFreeSize(0),
    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_1stVectorIndex(0),
    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
    m_1stNullItemsBeginCount(0),
    m_1stNullItemsMiddleCount(0),
    m_2ndNullItemsCount(0)
{
}

VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
{
}

void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}
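/*
Quick reference (informal sketch): the linear metadata keeps suballocations in two
vectors whose roles alternate, and m_2ndVectorMode selects the shape of the block:

    SECOND_VECTOR_EMPTY:        |---- 1st ---->                            |
    SECOND_VECTOR_RING_BUFFER:  |-- 2nd -->        |---- 1st ---->         |
    SECOND_VECTOR_DOUBLE_STACK: |---- 1st ---->             <---- 2nd ---- |
    0                                                                   size
*/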
bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty())
    {
        // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty())
    {
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i)
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree)
        {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else
        {
            ++nullItem1stCount;
        }

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; )
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}

VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();

    /*
    We don't consider gaps inside allocation vectors with freed allocations because
    they are not suitable for reuse in linear allocator. We consider only space that
    is available for new allocations.
    */
    if(IsEmpty())
    {
        return size;
    }

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode)
    {
    case SECOND_VECTOR_EMPTY:
        // Available space is after end of 1st, as well as before beginning of 1st (which would make it a ring buffer).
        {
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }

    case SECOND_VECTOR_RING_BUFFER:
        // Available space is only between end of 2nd and beginning of 1st.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }

    case SECOND_VECTOR_DOUBLE_STACK:
        // Available space is only between end of 1st and top of 2nd.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }

    default:
        VMA_ASSERT(0);
        return 0;
    }
}
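/*
Worked example (hypothetical 1000-byte block in double stack mode): if 1st ends at
offset 300 (last.offset + last.size == 300) and the top of 2nd starts at offset 800,
the only space usable for a new allocation is 800 - 300 == 500 bytes, even if freed
holes exist inside either stack - the linear algorithm never fills holes.
*/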
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    outInfo.blockCount = 1;
    outInfo.allocationCount = (uint32_t)GetAllocationCount();
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        // Found non-null allocation.
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            // 1. Process free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }

            // 2. Process this allocation.
            outInfo.usedBytes += suballoc.size;
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

            // 3. Prepare for next iteration.
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        // We are at the end.
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    outInfo.unusedBytes = size - outInfo.usedBytes;
}
void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                ++inoutStats.allocationCount;

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        // Found non-null allocation.
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            // 1. Process free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            // 2. Process this allocation.
            ++inoutStats.allocationCount;

            // 3. Prepare for next iteration.
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        // We are at the end.
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                ++inoutStats.allocationCount;

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: count allocations and unused ranges.

    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;

    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }

                ++alloc2ndCount;
                usedBytes += suballoc.size;

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    ++unusedRangeCount;
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                ++unusedRangeCount;
            }

            ++alloc1stCount;
            usedBytes += suballoc.size;

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < size)
            {
                ++unusedRangeCount;
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }

                ++alloc2ndCount;
                usedBytes += suballoc.size;

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    ++unusedRangeCount;
                }
                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);

    // SECOND PASS: write the actual JSON items.
    lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }

            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());
    return upperAddress ?
        CreateAllocationRequest_UpperAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
        CreateAllocationRequest_LowerAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}
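/*
Usage sketch (illustrative, using the public API; the pool handle is hypothetical):
the upperAddress path above is selected by VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT,
which is valid only for custom pools created with VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT
and turns the pool into a double stack:

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = myLinearPool; // hypothetical VmaPool
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
    // vmaCreateBuffer / vmaAllocateMemory then place the allocation at the
    // high end of the block, growing downwards.
*/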
bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
        return false;
    }

    // Try to allocate before 2nd.back(), or end of block if 2nd.empty().
    if(allocSize > size)
    {
        return false;
    }
    VkDeviceSize resultBaseOffset = size - allocSize;
    if(!suballocations2nd.empty())
    {
        const VmaSuballocation& lastSuballoc = suballocations2nd.back();
        resultBaseOffset = lastSuballoc.offset - allocSize;
        if(allocSize > lastSuballoc.offset)
        {
            return false;
        }
    }

    // Start from offset equal to end of free space.
    VkDeviceSize resultOffset = resultBaseOffset;

    // Apply VMA_DEBUG_MARGIN at the end.
    if(VMA_DEBUG_MARGIN > 0)
    {
        if(resultOffset < VMA_DEBUG_MARGIN)
        {
            return false;
        }
        resultOffset -= VMA_DEBUG_MARGIN;
    }

    // Apply alignment.
    resultOffset = VmaAlignDown(resultOffset, allocAlignment);

    // Check next suballocations from 2nd for BufferImageGranularity conflicts.
    // Make bigger alignment if necessary.
    if(bufferImageGranularity > 1 && !suballocations2nd.empty())
    {
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
        {
            const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
            if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
            {
                if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
                {
                    bufferImageGranularityConflict = true;
                    break;
                }
            }
            else
            {
                // Already on previous page.
                break;
            }
        }
        if(bufferImageGranularityConflict)
        {
            resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
        }
    }

    // There is enough free space.
    const VkDeviceSize endOf1st = !suballocations1st.empty() ?
        suballocations1st.back().offset + suballocations1st.back().size :
        0;
    if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
    {
        // Check previous suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
            }
        }

        // All tests passed: Success.
        pAllocationRequest->offset = resultOffset;
        pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
        pAllocationRequest->sumItemSize = 0;
        // pAllocationRequest->item unused.
        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
        return true;
    }

    return false;
}
bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        // Try to allocate at the end of 1st vector.

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations1st.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
            {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
                {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on previous page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            // pAllocationRequest->item, customData unused.
            pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }

    // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
    // beginning of 1st vector as the end of free space.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost)
        {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
            {
                // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    // No problem.
                }
                else
                {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else
                    {
                        return false;
                    }
                }
                ++index1st;
            }

            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, we must mark more allocations lost or fail.
            if(bufferImageGranularity > 1)
            {
                while(index1st < suballocations1st.size())
                {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
                    {
                        if(suballoc.hAllocation != VK_NULL_HANDLE)
                        {
                            // Not checking actual VmaIsBufferImageGranularityConflict(allocType, suballoc.type).
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                            {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else
                            {
                                return false;
                            }
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                    ++index1st;
                }
            }

            // Special case: There is not enough room at the end for this allocation, even after making all from the 1st lost.
            if(index1st == suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
            {
                // TODO: This case is not yet implemented; the allocation fails here.
                VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
                return false;
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1)
            {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++)
                {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
            // pAllocationRequest->item, customData unused.
            return true;
        }
    }

    return false;
}
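/*
Illustration of the wrap-around handled above (ring buffer mode, informal): when
free space behind 1st runs out, a new allocation goes to the end of 2nd, i.e. in
front of the remaining head of 1st:

    |-- 2nd -->[new]        |---- 1st ---->|
    0                                    size

The request fails (or allocations must first be made lost) once the end of 2nd
would collide with suballocations1st[index1st].offset.
*/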
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0)
    {
        return true;
    }

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    // We always start from 1st.
    SuballocationVectorType* suballocations = &AccessSuballocations1st();
    size_t index = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
    {
        if(index == suballocations->size())
        {
            index = 0;
            // If we get to the end of 1st, we wrap around to beginning of 2nd.
            if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                suballocations = &AccessSuballocations2nd();
            }
            // else: for SECOND_VECTOR_EMPTY, suballocations keeps pointing at 1st.
            VMA_ASSERT(!suballocations->empty());
        }
        VmaSuballocation& suballoc = (*suballocations)[index];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                if(suballocations == &AccessSuballocations1st())
                {
                    ++m_1stNullItemsMiddleCount;
                }
                else
                {
                    ++m_2ndNullItemsCount;
                }
                ++madeLostCount;
            }
            else
            {
                return false;
            }
        }
        ++index;
    }

    CleanupAfterFree();
    //VMA_HEAVY_ASSERT(Validate()); // Already called by CleanupAfterFree().

    return true;
}
uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount)
    {
        CleanupAfterFree();
    }

    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type)
    {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();

            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());

            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector.
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode)
            {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}

void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty())
    {
        // First allocation: Mark it as next empty at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset)
        {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
    {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaBinaryFindSorted(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc,
            VmaSuballocationOffsetLess());
        if(it != suballocations1st.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
    {
        // Item from the middle of 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
        if(it != suballocations2nd.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
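/*
Worked example for the heuristic above (hypothetical counts): with 40 suballocations
of which 25 are null, nullItemCount * 2 == 50 and (suballocCount - nullItemCount) * 3
== 45, so 1st is compacted. With only 10 nulls, 20 >= 90 is false and the vector is
left alone, because rewriting it would cost more than the memory it reclaims.
*/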
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty())
    {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else
    {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
        {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            VmaVectorRemove(suballocations2nd, 0);
        }

        if(ShouldCompact1st())
        {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
            {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
                {
                    ++srcIndex;
                }
                if(dstIndex != srcIndex)
                {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty())
        {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
        {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
                {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}
VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}

VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}

void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
    {
        ++m_LevelCount;
    }

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}
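/*
Worked example (hypothetical 100 MiB block): VmaPrevPow2 rounds the usable size
down to 64 MiB; the remaining 36 MiB become "unusable" padding reported by
GetUnusableSize(). Level 0 then corresponds to one 64 MiB node, level 1 to two
32 MiB nodes, and so on, until MAX_LEVELS or MIN_NODE_SIZE stops the subdivision.
*/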
bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next)
        {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL)
            {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else
            {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists at higher levels are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}
VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        if(m_FreeList[level].front != VMA_NULL)
        {
            return LevelToNodeSize(level);
        }
    }
    return 0;
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    outInfo.blockCount = 1;
    outInfo.allocationCount = outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = outInfo.unusedBytes = 0;
    outInfo.allocationSizeMax = outInfo.unusedRangeSizeMax = 0;
    outInfo.allocationSizeMin = outInfo.unusedRangeSizeMin = UINT64_MAX;

    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0)
    {
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
    }
}
void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0)
    {
        ++inoutStats.unusedRangeCount;
        // Not updating unusedRangeSizeMax with unusableSize because this space is not available for allocations.
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    // TODO optimize
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity: treat any allocation that
    // might be an OPTIMAL image as if it had that alignment and size.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
    {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    /*
    Lost allocations are not supported in the buddy allocator, so the request can
    succeed only if it requires making no allocations lost.
    */
    return pAllocationRequest->itemsToMakeLostCount == 0;
}

uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    /*
    Lost allocations are not supported in the buddy allocator.
    */
    return 0;
}
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down the tree, splitting free nodes, until the target level is reached.
    while(currLevel < targetLevel)
    {
        // currNode is the free node to split at currLevel.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
    }

    // Remove the chosen node from its free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert it to an allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
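// The order of the two AddToFreeListFront() calls above (right child before
// left child) is deliberate: it leaves leftChild at the front of the level's
// free list, which is exactly the node the next loop iteration picks up via
// m_FreeList[currLevel].front, so the descent continues into the left half
// of the just-split node without any searching.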
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }

    vma_delete(GetAllocationCallbacks(), node);
}
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev and next are validated separately in Validate().
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    // Find the deepest level whose node size still fits allocSize.
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
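/*
Worked example for AllocSizeToLevel() (the numbers are illustrative only):
with m_UsableSize = 256 MB, level n corresponds to a node size of
256 MB >> n. For a 10 MB request the loop accepts 128, 64, 32 and 16 MB
(10 <= next size each time) and stops at 8 MB, returning level 4, i.e. a
16 MB node. The 6 MB difference remains unused inside the node - the
internal fragmentation typical of buddy allocators.
*/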
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find node and level.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT)
    {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize)
        {
            node = node->split.leftChild;
        }
        else
        {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free nodes if possible.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
    {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}
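// FreeAtOffset() needs no per-allocation bookkeeping: it walks the tree from
// the root by offset (the left child covers the lower half of a node's
// range, the right child the upper half). The loop that follows is the
// classic buddy coalescing step - as long as the freed node's buddy is also
// free, both children are deleted and the parent reverts to a single free
// node one level up, keeping free space as coarse-grained as possible.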
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += levelNodeSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            ++outInfo.allocationCount;
            outInfo.usedBytes += allocSize;
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);

            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    // List is empty.
    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else
    {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}
void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED
////////////////////////////////////////////////////////////////////////////////
// class VmaDeviceMemoryBlock

VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}

void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}
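// Which metadata class Init() instantiates follows the pool's algorithm bit:
// VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT selects the linear allocator,
// VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT the buddy allocator, and 0 (the
// default) the generic free-list allocator. The block itself only owns the
// VkDeviceMemory handle; all suballocation logic lives in m_pMetadata.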
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}

bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}

void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}

VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
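/*
Memory layout assumed by the two functions above when VMA_DEBUG_MARGIN > 0:

    [ margin ][ allocation of allocSize bytes ][ margin ]
    ^ magic value written here                ^ and here

WriteMagicValueAroundAllocation() stamps a known pattern at
allocOffset - VMA_DEBUG_MARGIN and at allocOffset + allocSize;
ValidateMagicValueAroundAllocation() re-reads both spots on free and asserts
if either was overwritten, catching out-of-bounds writes just before or just
after the allocation.
*/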
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}

VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
static void VmaInitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
{
    inoutInfo.blockCount += srcInfo.blockCount;
    inoutInfo.allocationCount += srcInfo.allocationCount;
    inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
    inoutInfo.usedBytes += srcInfo.usedBytes;
    inoutInfo.unusedBytes += srcInfo.unusedBytes;
    inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
    inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
    inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
////////////////////////////////////////////////////////////////////////////////
// VmaPool_T

VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}

VmaPool_T::~VmaPool_T()
{
}

void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}

#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}

VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}

bool VmaBlockVector::IsEmpty()
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    return m_Blocks.empty();
}

bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_Algorithm == 0 || m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;

VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
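// Allocate() is all-or-nothing for multi-page requests: if any AllocatePage()
// call in the loop above fails, every allocation created so far is freed
// again and the output array is zeroed, so the caller never observes a
// partially fulfilled request.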
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // If linearAlgorithm is used, canMakeOtherLost is available only when used as ring buffer,
    // which in turn is available only when maxBlockCount = 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    /*
    Under certain condition, this whole section can be skipped for optimization, so
    we move on directly to trying to allocate with canMakeOtherLost. That is the case
    e.g. for custom pools with linear algorithm.
    */
    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
        }

        // 2. Try to create new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }

    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // Allocate from this block.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    UpdateHasEmptyBlock();
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment,
                        size,
                        m_MemoryTypeIndex,
                        suballocType,
                        mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations must have been touched while we are here. Next try.
            }
            else
            {
                // Could not find place in any of the blocks - break outer loop.
                break;
            }
        }
        // Maximum number of tries exceeded - a very unlikely situation.
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
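/*
Block size heuristic used above, illustrated with a preferred block size of
256 MiB (the numbers are an example only): the first blocks of a
non-explicit vector are created at 1/8, 1/4 and 1/2 of the preferred size -
32, 64, 128 MiB - as long as each candidate is still at least twice the
requested size, so small applications never commit the full 256 MiB up
front. If vkAllocateMemory itself fails, the second loop retries with halved
sizes (at most NEW_BLOCK_SIZE_SHIFT_MAX = 3 halvings) before giving up with
VK_ERROR_OUT_OF_DEVICE_MEMORY.
*/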
void VmaBlockVector::Free(const VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already had an empty block, or over budget - we don't want two empty blocks, so delete this one.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // else: we now have one empty block - leave it as a cache.
        }
        // pBlock didn't become empty, but we have another empty block - find and free that one.
        // (This is optional, heuristics.)
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
            }
        }

        UpdateHasEmptyBlock();
        IncrementallySortBlocks();
    }

    // Destruction of a free block. Deferred until this point, outside of mutex
    // lock, for performance reason.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}

void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}

void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
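// IncrementallySortBlocks() intentionally performs at most a single swap per
// call: blocks drift toward ascending sum of free space over many
// allocations and frees instead of being fully re-sorted each time. This
// keeps the common path cheap while still letting the best-fit search visit
// fuller blocks first.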
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex,
        m_FrameInUseCount,
        m_BufferImageGranularity,
        size,
        alignment,
        isUpperAddress,
        suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // Allocate from pBlock.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        UpdateHasEmptyBlock();
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment,
            size,
            m_MemoryTypeIndex,
            suballocType,
            mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;

#if VMA_BUFFER_DEVICE_ADDRESS
    // Every standalone block can potentially contain a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT - always enable the feature.
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_hAllocator->m_UseKhrBufferDeviceAddress)
    {
        allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
        VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created.

    // Create new block object for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
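// Note on the VMA_BUFFER_DEVICE_ADDRESS section above: when the allocator
// was created with buffer device address support enabled
// (m_UseKhrBufferDeviceAddress), every block is allocated with
// VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR, because any buffer created with
// VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT might later be bound to it.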
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
            memmove(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
    // Regardless of pDefragCtx->res.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
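/*
The CPU path above is: (1) map every block touched by a move, (2) for each
move invalidate the source range if the memory type is non-coherent, copy
with memmove - not memcpy, since source and destination may overlap when an
allocation slides down within the same block - then flush the destination
range, (3) unmap blocks that were mapped only for defragmentation. Both
memRange.offset and memRange.size are aligned to nonCoherentAtomSize, as
vkInvalidateMappedMemoryRanges / vkFlushMappedMemoryRanges require.
*/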
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind a buffer covering the whole block where necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Save buffers to defrag context for later destruction.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}
void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                break;
            }
        }
    }
    UpdateHasEmptyBlock();
}
void VmaBlockVector::UpdateHasEmptyBlock()
{
    m_HasEmptyBlock = false;
    for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = true;
            break;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: Heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
            {
                if(!m_Mutex.TryLockWrite())
                {
                    pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
                    return;
                }
            }
            else
            {
                m_Mutex.LockWrite();
                pCtx->mutexLocked = true;
            }
        }

        pCtx->Begin(overlappingMoveSupported, flags);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        // In incremental mode the moves are applied by the user pass by pass, not here.
        if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
        {
            if(m_hAllocator->m_UseMutex)
                m_Mutex.UnlockWrite();

            if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
                pCtx->res = VK_NOT_READY;

            return;
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
            }
        }
    }
}
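// Heuristic used above when both paths are possible: prefer the GPU copy
// (vkCmdCopyBuffer via ApplyDefragmentationMovesGpu) for DEVICE_LOCAL memory
// and on integrated GPUs, otherwise fall back to the mapped-memory CPU path.
// GPU defragmentation is disabled entirely when corruption detection is
// active, since the magic margins are written through a CPU mapping.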
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    uint32_t flags,
    VmaDefragmentationStats* pStats)
{
    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL && m_hAllocator->m_UseMutex)
    {
        VMA_ASSERT(pCtx->mutexLocked == false);

        // Incremental defragmentation doesn't hold the lock, so when we enter here
        // we don't actually have any lock protecting us. Since we mutate state here,
        // we have to take the lock out now.
        m_Mutex.LockWrite();
        pCtx->mutexLocked = true;
    }

    // If the mutex isn't locked we didn't do any work and there is nothing to delete.
    if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
    {
        // Destroy buffers.
        for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
        {
            VmaBlockDefragmentationContext &blockCtx = pCtx->blockContexts[blockIndex];
            if(blockCtx.hBuffer)
            {
                (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            FreeEmptyBlocks(pStats);
        }
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
uint32_t VmaBlockVector::ProcessDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    const uint32_t moveCount = std::min(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);

    for(uint32_t i = 0; i < moveCount; ++ i)
    {
        VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];

        pMove->allocation = move.hAllocation;
        pMove->memory = move.pDstBlock->GetDeviceMemory();
        pMove->offset = move.dstOffset;

        ++ pMove;
    }

    pCtx->defragmentationMovesProcessed += moveCount;

    return moveCount;
}

void VmaBlockVector::CommitDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationStats* pStats)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
    {
        const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];

        move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
        move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
    }

    pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
    FreeEmptyBlocks(pStats);
}
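/*
Incremental defragmentation flow implemented by the two functions above, as
driven by the public begin/end defragmentation pass API:
ProcessDefragmentations() hands the user up to maxMoves pending moves (the
destination VkDeviceMemory handle plus offset) so the user can copy the data
themselves; CommitDefragmentations() runs when the pass ends and only then
rewrites the metadata - freeing each source region and pointing the
VmaAllocation at its destination block - before empty blocks are released.
*/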
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}

bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
////////////////////////////////////////////////////////////////////////////////
// VmaDefragmentationAlgorithm_Generic members definition

VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}

VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    bool freeOldAllocations)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    // Blocks below srcBlockMinIndex are never used as source of a move.
    size_t srcBlockMinIndex = 0;

    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find a new place for this allocation in a preceding or the current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT, // strategy
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move = {};
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                move.hAllocation = allocInfo.m_hAllocation;
                move.pSrcBlock = pSrcBlockInfo->m_pBlock;
                move.pDstBlock = pDstBlockInfo->m_pBlock;

                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);

                if(freeOldAllocations)
                {
                    pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
                    allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
                }

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pSrcBlockInfo->m_Allocations for next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}

VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        // This is a choice based on research.
        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks this time by the main criterium, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // This is a choice based on research.
    const uint32_t roundCount = 2;

    // Execute defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove, !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL));
    }

    return result;
}

bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
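// MoveMakesSense() encodes the compaction direction: a move is only useful
// if it transports an allocation to an earlier block, or to a lower offset
// within the same block. For example, block 2 offset 4096 -> block 0
// offset 0 is accepted, while block 0 -> block 2, or any move to a higher
// offset, is rejected - which prevents allocations from oscillating back
// and forth between rounds.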
////////////////////////////////////////////////////////////////////////////////
// VmaDefragmentationAlgorithm_Fast members definition

VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}

VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
14060 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
14061 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14062 VkDeviceSize maxBytesToMove,
14063 uint32_t maxAllocationsToMove,
14066 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
14068 const size_t blockCount = m_pBlockVector->GetBlockCount();
14069 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
14074 PreprocessMetadata();
14078 m_BlockInfos.resize(blockCount);
14079 for(
size_t i = 0; i < blockCount; ++i)
14081 m_BlockInfos[i].origBlockIndex = i;
14084 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
14085 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
14086 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
14091 FreeSpaceDatabase freeSpaceDb;
14093 size_t dstBlockInfoIndex = 0;
14094 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14095 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14096 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14097 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
14098 VkDeviceSize dstOffset = 0;
14101 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
14103 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
14104 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
14105 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
14106 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
14107 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
14109 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
14110 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
14111 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
14112 if(m_AllocationsMoved == maxAllocationsToMove ||
14113 m_BytesMoved + srcAllocSize > maxBytesToMove)
14118 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
14120 VmaDefragmentationMove move = {};
14122 size_t freeSpaceInfoIndex;
14123 VkDeviceSize dstAllocOffset;
14124 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
14125 freeSpaceInfoIndex, dstAllocOffset))
14127 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
14128 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
14129 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
14132 if(freeSpaceInfoIndex == srcBlockInfoIndex)
14134 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14138 VmaSuballocation suballoc = *srcSuballocIt;
14139 suballoc.offset = dstAllocOffset;
14140 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
14141 m_BytesMoved += srcAllocSize;
14142 ++m_AllocationsMoved;
14144 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14146 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14147 srcSuballocIt = nextSuballocIt;
14149 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14151 move.srcBlockIndex = srcOrigBlockIndex;
14152 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14153 move.srcOffset = srcAllocOffset;
14154 move.dstOffset = dstAllocOffset;
14155 move.size = srcAllocSize;
14157 moves.push_back(move);
14164 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
14166 VmaSuballocation suballoc = *srcSuballocIt;
14167 suballoc.offset = dstAllocOffset;
14168 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
14169 m_BytesMoved += srcAllocSize;
14170 ++m_AllocationsMoved;
14172 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14174 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14175 srcSuballocIt = nextSuballocIt;
14177 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14179 move.srcBlockIndex = srcOrigBlockIndex;
14180 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14181 move.srcOffset = srcAllocOffset;
14182 move.dstOffset = dstAllocOffset;
14183 move.size = srcAllocSize;
14185 moves.push_back(move);
14190 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
14193 while(dstBlockInfoIndex < srcBlockInfoIndex &&
14194 dstAllocOffset + srcAllocSize > dstBlockSize)
14197 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
14199 ++dstBlockInfoIndex;
14200 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14201 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14202 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14203 dstBlockSize = pDstMetadata->GetSize();
14205 dstAllocOffset = 0;
14209 if(dstBlockInfoIndex == srcBlockInfoIndex)
14211 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14213 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
14215 bool skipOver = overlap;
14216 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
14220 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
14225 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
14227 dstOffset = srcAllocOffset + srcAllocSize;
14233 srcSuballocIt->offset = dstAllocOffset;
14234 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
14235 dstOffset = dstAllocOffset + srcAllocSize;
14236 m_BytesMoved += srcAllocSize;
14237 ++m_AllocationsMoved;
14240 move.srcBlockIndex = srcOrigBlockIndex;
14241 move.dstBlockIndex = dstOrigBlockIndex;
14242 move.srcOffset = srcAllocOffset;
14243 move.dstOffset = dstAllocOffset;
14244 move.size = srcAllocSize;
14246 moves.push_back(move);
14254 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
14255 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
14257 VmaSuballocation suballoc = *srcSuballocIt;
14258 suballoc.offset = dstAllocOffset;
14259 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
14260 dstOffset = dstAllocOffset + srcAllocSize;
14261 m_BytesMoved += srcAllocSize;
14262 ++m_AllocationsMoved;
14264 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14266 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14267 srcSuballocIt = nextSuballocIt;
14269 pDstMetadata->m_Suballocations.push_back(suballoc);
14271 move.srcBlockIndex = srcOrigBlockIndex;
14272 move.dstBlockIndex = dstOrigBlockIndex;
14273 move.srcOffset = srcAllocOffset;
14274 move.dstOffset = dstAllocOffset;
14275 move.size = srcAllocSize;
14277 moves.push_back(move);
14283 m_BlockInfos.clear();
14285 PostprocessMetadata();
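// PreprocessMetadata() below strips every FREE suballocation from each
// block's metadata, so the relocation loop above can walk the remaining
// used suballocations as a packed, offset-sorted list; PostprocessMetadata()
// later re-derives the free gaps and rebuilds m_FreeSuballocationsBySize.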
14290 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
14292 const size_t blockCount = m_pBlockVector->GetBlockCount();
14293 for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14295 VmaBlockMetadata_Generic* const pMetadata =
14296 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14297 pMetadata->m_FreeCount = 0;
14298 pMetadata->m_SumFreeSize = pMetadata->GetSize();
14299 pMetadata->m_FreeSuballocationsBySize.clear();
14300 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14301 it != pMetadata->m_Suballocations.end(); )
14303 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
14305 VmaSuballocationList::iterator nextIt = it;
14307 pMetadata->m_Suballocations.erase(it);
14318 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
14320 const size_t blockCount = m_pBlockVector->GetBlockCount();
14321 for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14323 VmaBlockMetadata_Generic* const pMetadata =
14324 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14325 const VkDeviceSize blockSize = pMetadata->GetSize();
14328 if(pMetadata->m_Suballocations.empty())
14330 pMetadata->m_FreeCount = 1;
14332 VmaSuballocation suballoc = {
14336 VMA_SUBALLOCATION_TYPE_FREE };
14337 pMetadata->m_Suballocations.push_back(suballoc);
14338 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
14343 VkDeviceSize offset = 0;
14344 VmaSuballocationList::iterator it;
14345 for(it = pMetadata->m_Suballocations.begin();
14346 it != pMetadata->m_Suballocations.end();
14349 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
14350 VMA_ASSERT(it->offset >= offset);
14353 if(it->offset > offset)
14355 ++pMetadata->m_FreeCount;
14356 const VkDeviceSize freeSize = it->offset - offset;
14357 VmaSuballocation suballoc = {
14361 VMA_SUBALLOCATION_TYPE_FREE };
14362 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
14363 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14365 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
14369 pMetadata->m_SumFreeSize -= it->size;
14370 offset = it->offset + it->size;
14374 if(offset < blockSize)
14376 ++pMetadata->m_FreeCount;
14377 const VkDeviceSize freeSize = blockSize - offset;
14378 VmaSuballocation suballoc = {
14382 VMA_SUBALLOCATION_TYPE_FREE };
14383 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
14384 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
14385 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14387 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
14392 pMetadata->m_FreeSuballocationsBySize.begin(),
14393 pMetadata->m_FreeSuballocationsBySize.end(),
14394 VmaSuballocationItemSizeLess());
14397 VMA_HEAVY_ASSERT(pMetadata->Validate());
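// InsertSuballoc() keeps pMetadata->m_Suballocations sorted by offset using a
// simple linear scan. Sketch with hypothetical offsets: a list [0, 256, 1024]
// receiving a suballocation at offset 512 becomes [0, 256, 512, 1024].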
14401 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
14404 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14405 while(it != pMetadata->m_Suballocations.end())
14407 if(it->offset < suballoc.offset)
14412 pMetadata->m_Suballocations.insert(it, suballoc);
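// VmaBlockVectorDefragmentationContext carries the per-block-vector state of
// one defragmentation: the chosen algorithm, the explicit allocation list (or
// the all-allocations flag), and counters for moves planned, processed, and
// committed.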
14418 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
14421 VmaBlockVector* pBlockVector,
14422 uint32_t currFrameIndex) :
14424 mutexLocked(false),
14425 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
14426 defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
14427 defragmentationMovesProcessed(0),
14428 defragmentationMovesCommitted(0),
14429 hasDefragmentationPlan(0),
14430 m_hAllocator(hAllocator),
14431 m_hCustomPool(hCustomPool),
14432 m_pBlockVector(pBlockVector),
14433 m_CurrFrameIndex(currFrameIndex),
14434 m_pAlgorithm(VMA_NULL),
14435 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
14436 m_AllAllocations(false)
14440 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
14442 vma_delete(m_hAllocator, m_pAlgorithm);
14445 void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
14447 AllocInfo info = { hAlloc, pChanged };
14448 m_Allocations.push_back(info);
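// Begin() selects the algorithm. The fast single-pass algorithm is only used
// when every allocation in the block vector participates, there is no debug
// margin, and no buffer/image granularity conflict is possible; otherwise the
// generic algorithm handles the general case.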
14451 void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
14453 const bool allAllocations = m_AllAllocations ||
14454 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
14467 if(VMA_DEBUG_MARGIN == 0 &&
14469 !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
14472 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
14473 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
14477 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
14478 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
14483 m_pAlgorithm->AddAll();
14487 for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
14489 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
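// VmaDefragmentationContext_T fans a defragmentation request out into one
// VmaBlockVectorDefragmentationContext per default memory-type block vector
// (m_DefaultPoolContexts) and one per participating custom pool
// (m_CustomPoolContexts).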
14497 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
14499 uint32_t currFrameIndex,
14502 m_hAllocator(hAllocator),
14503 m_CurrFrameIndex(currFrameIndex),
14506 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
14508 memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
14511 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
14513 for(size_t i = m_CustomPoolContexts.size(); i--; )
14515 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
14516 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
14517 vma_delete(m_hAllocator, pBlockVectorCtx);
14519 for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
14521 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
14522 if(pBlockVectorCtx)
14524 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
14525 vma_delete(m_hAllocator, pBlockVectorCtx);
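// AddPools() registers whole custom pools. Only pools using the default
// algorithm (GetAlgorithm() == 0) are eligible, and an already-registered
// pool reuses its existing context instead of receiving a second one.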
14530 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, const VmaPool* pPools)
14532 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
14534 VmaPool pool = pPools[poolIndex];
14537 if(pool->m_BlockVector.GetAlgorithm() == 0)
14539 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
14541 for(size_t i = m_CustomPoolContexts.size(); i--; )
14543 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
14545 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
14550 if(!pBlockVectorDefragCtx)
14552 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
14555 &pool->m_BlockVector,
14557 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
14560 pBlockVectorDefragCtx->AddAll();
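// AddAllocations() registers individual allocations: dedicated and already
// lost allocations are skipped, and each block allocation is routed to the
// context of its owning custom pool or of its memory type's default pool,
// creating that context on first use.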
14565 void VmaDefragmentationContext_T::AddAllocations(
14566 uint32_t allocationCount,
14568 VkBool32* pAllocationsChanged)
14571 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14574 VMA_ASSERT(hAlloc);
14576 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
14578 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
14580 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
14582 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
14584 if(hAllocPool != VK_NULL_HANDLE)
14587 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
14589 for(size_t i = m_CustomPoolContexts.size(); i--; )
14591 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
14593 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
14597 if(!pBlockVectorDefragCtx)
14599 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
14602 &hAllocPool->m_BlockVector,
14604 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
14611 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
14612 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
14613 if(!pBlockVectorDefragCtx)
14615 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
14618 m_hAllocator->m_pBlockVectors[memTypeIndex],
14620 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
14624 if(pBlockVectorDefragCtx)
14626 VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
14627 &pAllocationsChanged[allocIndex] : VMA_NULL;
14628 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
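// Defragment() records the move limits, then either performs the whole
// blocking defragmentation of every registered context immediately, or only
// builds per-context plans and returns moves through
// ProcessDefragmentations(). The incremental path is assumed to be gated by
// a defragmentation flag supplied at begin time.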
14634 VkResult VmaDefragmentationContext_T::Defragment(
14635 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
14636 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
14648 m_MaxCpuBytesToMove = maxCpuBytesToMove;
14649 m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
14651 m_MaxGpuBytesToMove = maxGpuBytesToMove;
14652 m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;
14654 if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
14655 m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
14658 return VK_NOT_READY;
14661 if(commandBuffer == VK_NULL_HANDLE)
14663 maxGpuBytesToMove = 0;
14664 maxGpuAllocationsToMove = 0;
14667 VkResult res = VK_SUCCESS;
14670 for(uint32_t memTypeIndex = 0;
14671 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
14674 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
14675 if(pBlockVectorCtx)
14677 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
14678 pBlockVectorCtx->GetBlockVector()->Defragment(
14681 maxCpuBytesToMove, maxCpuAllocationsToMove,
14682 maxGpuBytesToMove, maxGpuAllocationsToMove,
14684 if(pBlockVectorCtx->res != VK_SUCCESS)
14686 res = pBlockVectorCtx->res;
14692 for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
14693 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
14696 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
14697 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
14698 pBlockVectorCtx->GetBlockVector()->Defragment(
14701 maxCpuBytesToMove, maxCpuAllocationsToMove,
14702 maxGpuBytesToMove, maxGpuAllocationsToMove,
14704 if(pBlockVectorCtx->res != VK_SUCCESS)
14706 res = pBlockVectorCtx->res;
14719 for(uint32_t memTypeIndex = 0;
14720 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
14723 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
14724 if(pBlockVectorCtx)
14726 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
14728 if(!pBlockVectorCtx->hasDefragmentationPlan)
14730 pBlockVectorCtx->GetBlockVector()->Defragment(
14733 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
14734 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
14737 if(pBlockVectorCtx->res < VK_SUCCESS)
14740 pBlockVectorCtx->hasDefragmentationPlan = true;
14743 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
14745 pCurrentMove, movesLeft);
14747 movesLeft -= processed;
14748 pCurrentMove += processed;
14753 for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
14754 customCtxIndex < customCtxCount;
14757 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
14758 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
14760 if(!pBlockVectorCtx->hasDefragmentationPlan)
14762 pBlockVectorCtx->GetBlockVector()->Defragment(
14765 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
14766 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
14769 if(pBlockVectorCtx->res < VK_SUCCESS)
14772 pBlockVectorCtx->hasDefragmentationPlan = true;
14775 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
14777 pCurrentMove, movesLeft);
14779 movesLeft -= processed;
14780 pCurrentMove += processed;
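// DefragmentPassEnd() commits the moves each context processed during the
// current pass and returns VK_NOT_READY while any context still has
// unplanned or uncommitted moves, signalling the caller to run another pass.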
14787 VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
14789 VkResult res = VK_SUCCESS;
14792 for(uint32_t memTypeIndex = 0;
14793 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
14796 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
14797 if(pBlockVectorCtx)
14799 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
14801 if(!pBlockVectorCtx->hasDefragmentationPlan)
14803 res = VK_NOT_READY;
14807 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
14808 pBlockVectorCtx, m_pStats);
14810 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
14811 res = VK_NOT_READY;
14816 for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
14817 customCtxIndex < customCtxCount;
14820 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
14821 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
14823 if(!pBlockVectorCtx->hasDefragmentationPlan)
14825 res = VK_NOT_READY;
14829 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
14830 pBlockVectorCtx, m_pStats);
14832 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
14833 res = VK_NOT_READY;
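// VmaRecorder serializes intercepted calls as CSV lines of the form
// "threadId,time,frameIndex,functionName,args...". The file opens with the
// header "Vulkan Memory Allocator,Calls recording" and format version "1,8";
// time is seconds since recorder creation, measured with
// QueryPerformanceCounter (the feature is Windows-only).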
14842 #if VMA_RECORDING_ENABLED
14844 VmaRecorder::VmaRecorder() :
14849 m_StartCounter(INT64_MAX)
14855 m_UseMutex = useMutex;
14856 m_Flags = settings.flags;
14858 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
14859 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
14862 errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
14865 return VK_ERROR_INITIALIZATION_FAILED;
14869 fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
14870 fprintf(m_File, "%s\n", "1,8");
14875 VmaRecorder::~VmaRecorder()
14877 if(m_File != VMA_NULL)
14883 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
14885 CallParams callParams;
14886 GetBasicParams(callParams);
14888 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14889 fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
14893 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
14895 CallParams callParams;
14896 GetBasicParams(callParams);
14898 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14899 fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
14905 CallParams callParams;
14906 GetBasicParams(callParams);
14908 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14909 fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
14920 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
14922 CallParams callParams;
14923 GetBasicParams(callParams);
14925 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14926 fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
14931 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
14932 const VkMemoryRequirements& vkMemReq,
14936 CallParams callParams;
14937 GetBasicParams(callParams);
14939 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14940 UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
14941 fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14943 vkMemReq.alignment,
14944 vkMemReq.memoryTypeBits,
14952 userDataStr.GetString());
14956 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
14957 const VkMemoryRequirements& vkMemReq,
14959 uint64_t allocationCount,
14962 CallParams callParams;
14963 GetBasicParams(callParams);
14965 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14966 UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
14967 fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
14969 vkMemReq.alignment,
14970 vkMemReq.memoryTypeBits,
14977 PrintPointerList(allocationCount, pAllocations);
14978 fprintf(m_File, ",%s\n", userDataStr.GetString());
14982 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
14983 const VkMemoryRequirements& vkMemReq,
14984 bool requiresDedicatedAllocation,
14985 bool prefersDedicatedAllocation,
14989 CallParams callParams;
14990 GetBasicParams(callParams);
14992 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14993 UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
14994 fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14996 vkMemReq.alignment,
14997 vkMemReq.memoryTypeBits,
14998 requiresDedicatedAllocation ? 1 : 0,
14999 prefersDedicatedAllocation ? 1 : 0,
15007 userDataStr.GetString());
15011 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
15012 const VkMemoryRequirements& vkMemReq,
15013 bool requiresDedicatedAllocation,
15014 bool prefersDedicatedAllocation,
15018 CallParams callParams;
15019 GetBasicParams(callParams);
15021 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15022 UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
15023 fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15025 vkMemReq.alignment,
15026 vkMemReq.memoryTypeBits,
15027 requiresDedicatedAllocation ? 1 : 0,
15028 prefersDedicatedAllocation ? 1 : 0,
15036 userDataStr.GetString());
15040 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
15043 CallParams callParams;
15044 GetBasicParams(callParams);
15046 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15047 fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
15052 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
15053 uint64_t allocationCount,
15056 CallParams callParams;
15057 GetBasicParams(callParams);
15059 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15060 fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
15061 PrintPointerList(allocationCount, pAllocations);
15062 fprintf(m_File, "\n");
15066 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
15068 const void* pUserData)
15070 CallParams callParams;
15071 GetBasicParams(callParams);
15073 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15074 UserDataString userDataStr(
15077 fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15079 userDataStr.GetString());
15083 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
15086 CallParams callParams;
15087 GetBasicParams(callParams);
15089 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15090 fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
15095 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
15098 CallParams callParams;
15099 GetBasicParams(callParams);
15101 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15102 fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
15107 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
15110 CallParams callParams;
15111 GetBasicParams(callParams);
15113 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15114 fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
15119 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
15120 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15122 CallParams callParams;
15123 GetBasicParams(callParams);
15125 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15126 fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
15133 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
15134 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15136 CallParams callParams;
15137 GetBasicParams(callParams);
15139 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15140 fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
15147 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
15148 const VkBufferCreateInfo& bufCreateInfo,
15152 CallParams callParams;
15153 GetBasicParams(callParams);
15155 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15156 UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
15157 fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15158 bufCreateInfo.flags,
15159 bufCreateInfo.size,
15160 bufCreateInfo.usage,
15161 bufCreateInfo.sharingMode,
15162 allocCreateInfo.flags,
15163 allocCreateInfo.usage,
15167 allocCreateInfo.pool,
15169 userDataStr.GetString());
15173 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
15174 const VkImageCreateInfo& imageCreateInfo,
15178 CallParams callParams;
15179 GetBasicParams(callParams);
15181 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15182 UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
15183 fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15184 imageCreateInfo.flags,
15185 imageCreateInfo.imageType,
15186 imageCreateInfo.format,
15187 imageCreateInfo.extent.width,
15188 imageCreateInfo.extent.height,
15189 imageCreateInfo.extent.depth,
15190 imageCreateInfo.mipLevels,
15191 imageCreateInfo.arrayLayers,
15192 imageCreateInfo.samples,
15193 imageCreateInfo.tiling,
15194 imageCreateInfo.usage,
15195 imageCreateInfo.sharingMode,
15196 imageCreateInfo.initialLayout,
15197 allocCreateInfo.flags,
15198 allocCreateInfo.usage,
15202 allocCreateInfo.pool,
15204 userDataStr.GetString());
15208 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
15211 CallParams callParams;
15212 GetBasicParams(callParams);
15214 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15215 fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
15220 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
15223 CallParams callParams;
15224 GetBasicParams(callParams);
15226 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15227 fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
15232 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
15235 CallParams callParams;
15236 GetBasicParams(callParams);
15238 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15239 fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
15244 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
15247 CallParams callParams;
15248 GetBasicParams(callParams);
15250 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15251 fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
15256 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
15259 CallParams callParams;
15260 GetBasicParams(callParams);
15262 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15263 fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
15268 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
15272 CallParams callParams;
15273 GetBasicParams(callParams);
15275 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15276 fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
15279 fprintf(m_File, ",");
15281 fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
15291 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
15294 CallParams callParams;
15295 GetBasicParams(callParams);
15297 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15298 fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
15303 void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
15307 CallParams callParams;
15308 GetBasicParams(callParams);
15310 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15311 fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15312 pool, name != VMA_NULL ? name : "");
15318 if(pUserData != VMA_NULL)
15322 m_Str = (const char*)pUserData;
15326 sprintf_s(m_PtrStr, "%p", pUserData);
15336 void VmaRecorder::WriteConfiguration(
15337 const VkPhysicalDeviceProperties& devProps,
15338 const VkPhysicalDeviceMemoryProperties& memProps,
15339 uint32_t vulkanApiVersion,
15340 bool dedicatedAllocationExtensionEnabled,
15341 bool bindMemory2ExtensionEnabled,
15342 bool memoryBudgetExtensionEnabled,
15343 bool deviceCoherentMemoryExtensionEnabled)
15345 fprintf(m_File, "Config,Begin\n");
15347 fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));
15349 fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
15350 fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
15351 fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
15352 fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
15353 fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
15354 fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);
15356 fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
15357 fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
15358 fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
15360 fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
15361 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
15363 fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
15364 fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
15366 fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
15367 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
15369 fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
15370 fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
15373 fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
15374 fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
15375 fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
15376 fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);
15378 fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
15379 fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
15380 fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
15381 fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
15382 fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
15383 fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
15384 fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
15385 fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
15386 fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
15388 fprintf(m_File, "Config,End\n");
15391 void VmaRecorder::GetBasicParams(CallParams& outParams)
15393 outParams.threadId = GetCurrentThreadId();
15395 LARGE_INTEGER counter;
15396 QueryPerformanceCounter(&counter);
15397 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
15400 void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
15404 fprintf(m_File, "%p", pItems[0]);
15405 for(uint64_t i = 1; i < count; ++i)
15407 fprintf(m_File, " %p", pItems[i]);
15412 void VmaRecorder::Flush()
15420 #endif // #if VMA_RECORDING_ENABLED
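// VmaAllocationObjectAllocator is a mutex-guarded pool allocator for
// VmaAllocation_T objects, carving them out of 1024-item blocks (the second
// constructor argument) so handle creation does not hit the general-purpose
// heap on every allocation.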
15425 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
15426 m_Allocator(pAllocationCallbacks, 1024)
15430 template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
15432 VmaMutexLock mutexLock(m_Mutex);
15433 return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
15436 void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
15438 VmaMutexLock mutexLock(m_Mutex);
15439 m_Allocator.Free(hAlloc);
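// The VmaAllocator_T constructor validates the requested features against the
// compile-time configuration, imports Vulkan function pointers, caches
// physical-device and memory properties, applies pCreateInfo->pHeapSizeLimit,
// and creates one VmaBlockVector plus one dedicated-allocation vector per
// memory type.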
15447 m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
15453 m_hDevice(pCreateInfo->device),
15454 m_hInstance(pCreateInfo->instance),
15455 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
15456 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
15457 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
15458 m_AllocationObjectAllocator(&m_AllocationCallbacks),
15459 m_HeapSizeLimitMask(0),
15460 m_PreferredLargeHeapBlockSize(0),
15461 m_PhysicalDevice(pCreateInfo->physicalDevice),
15462 m_CurrentFrameIndex(0),
15463 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
15464 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
15466 m_GlobalMemoryTypeBits(UINT32_MAX)
15468 ,m_pRecorder(VMA_NULL)
15471 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
15473 m_UseKhrDedicatedAllocation = false;
15474 m_UseKhrBindMemory2 = false;
15477 if(VMA_DEBUG_DETECT_CORRUPTION)
15480 VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
15485 if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
15487 #if !(VMA_DEDICATED_ALLOCATION)
15490 VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
15493 #if !(VMA_BIND_MEMORY2)
15496 VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
15500 #if !(VMA_MEMORY_BUDGET)
15503 VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
15506 #if !(VMA_BUFFER_DEVICE_ADDRESS)
15507 if(m_UseKhrBufferDeviceAddress)
15509 VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
15512 #if VMA_VULKAN_VERSION < 1002000
15513 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
15515 VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
15518 #if VMA_VULKAN_VERSION < 1001000
15519 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
15521 VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
15525 memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
15526 memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
15527 memset(&m_MemProps, 0, sizeof(m_MemProps));
15529 memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
15530 memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
15531 memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));
15542 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
15543 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
15545 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
15546 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
15547 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
15548 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
15553 m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
15557 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
15559 const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
15560 if(limit != VK_WHOLE_SIZE)
15562 m_HeapSizeLimitMask |= 1u << heapIndex;
15563 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
15565 m_MemProps.memoryHeaps[heapIndex].size = limit;
15571 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15573 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
15575 m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
15579 preferredBlockSize,
15582 GetBufferImageGranularity(),
15588 m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
15595 VkResult res = VK_SUCCESS;
15600 #if VMA_RECORDING_ENABLED
15601 m_pRecorder = vma_new(this, VmaRecorder)();
15603 if(res != VK_SUCCESS)
15607 m_pRecorder->WriteConfiguration(
15608 m_PhysicalDeviceProperties,
15610 m_VulkanApiVersion,
15611 m_UseKhrDedicatedAllocation,
15612 m_UseKhrBindMemory2,
15613 m_UseExtMemoryBudget,
15614 m_UseAmdDeviceCoherentMemory);
15615 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
15617 VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
15618 return VK_ERROR_FEATURE_NOT_PRESENT;
15622 #if VMA_MEMORY_BUDGET
15623 if(m_UseExtMemoryBudget)
15625 UpdateVulkanBudget();
15627 #endif // #if VMA_MEMORY_BUDGET
15632 VmaAllocator_T::~VmaAllocator_T()
15634 #if VMA_RECORDING_ENABLED
15635 if(m_pRecorder != VMA_NULL)
15637 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
15638 vma_delete(this, m_pRecorder);
15642 VMA_ASSERT(m_Pools.empty());
15644 for(size_t i = GetMemoryTypeCount(); i--; )
15646 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
15648 VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
15651 vma_delete(this, m_pDedicatedAllocations[i]);
15652 vma_delete(this, m_pBlockVectors[i]);
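// Vulkan entry points are imported in three layers, each filling only what is
// still missing: statically linked functions (VMA_STATIC_VULKAN_FUNCTIONS),
// then user-provided VmaVulkanFunctions, then vkGetInstanceProcAddr /
// vkGetDeviceProcAddr lookups (VMA_DYNAMIC_VULKAN_FUNCTIONS).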
15656 void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
15658 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
15659 ImportVulkanFunctions_Static();
15662 if(pVulkanFunctions != VMA_NULL)
15664 ImportVulkanFunctions_Custom(pVulkanFunctions);
15667 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
15668 ImportVulkanFunctions_Dynamic();
15671 ValidateVulkanFunctions();
15674 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
15676 void VmaAllocator_T::ImportVulkanFunctions_Static()
15679 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
15680 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
15681 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
15682 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
15683 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
15684 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
15685 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
15686 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
15687 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
15688 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
15689 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
15690 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
15691 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
15692 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
15693 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
15694 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
15695 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
15698 #if VMA_VULKAN_VERSION >= 1001000
15699 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
15701 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
15702 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
15703 m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
15704 m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
15705 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
15710 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
15712 void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions)
15714 VMA_ASSERT(pVulkanFunctions != VMA_NULL);
15716 #define VMA_COPY_IF_NOT_NULL(funcName) \
15717 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
15719 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
15720 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
15721 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
15722 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
15723 VMA_COPY_IF_NOT_NULL(vkMapMemory);
15724 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
15725 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
15726 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
15727 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
15728 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
15729 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
15730 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
15731 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
15732 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
15733 VMA_COPY_IF_NOT_NULL(vkCreateImage);
15734 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
15735 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
15737 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
15738 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
15739 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
15742 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
15743 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
15744 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
15747 #if VMA_MEMORY_BUDGET
15748 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
15751 #undef VMA_COPY_IF_NOT_NULL
15754 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
15756 void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
15758 #define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
15759 if(m_VulkanFunctions.memberName == VMA_NULL) \
15760 m_VulkanFunctions.memberName = \
15761 (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
15762 #define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
15763 if(m_VulkanFunctions.memberName == VMA_NULL) \
15764 m_VulkanFunctions.memberName = \
15765 (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);
15767 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
15768 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
15769 VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
15770 VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
15771 VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
15772 VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
15773 VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
15774 VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
15775 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
15776 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
15777 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
15778 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
15779 VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
15780 VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
15781 VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
15782 VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
15783 VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");
15785 #if VMA_DEDICATED_ALLOCATION
15786 if(m_UseKhrDedicatedAllocation)
15788 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
15789 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
15793 #if VMA_BIND_MEMORY2
15794 if(m_UseKhrBindMemory2)
15796 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
15797 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
15799 #endif // #if VMA_BIND_MEMORY2
15801 #if VMA_MEMORY_BUDGET
15802 if(m_UseExtMemoryBudget && m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
15804 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
15806 #endif // #if VMA_MEMORY_BUDGET
15808 #undef VMA_FETCH_DEVICE_FUNC
15809 #undef VMA_FETCH_INSTANCE_FUNC
15812 #endif // #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
15814 void VmaAllocator_T::ValidateVulkanFunctions()
15816 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
15817 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
15818 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
15819 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
15820 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
15821 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
15822 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
15823 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
15824 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
15825 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
15826 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
15827 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
15828 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
15829 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
15830 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
15831 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
15832 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
15834 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
15835 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
15837 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
15838 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
15842 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
15843 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
15845 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
15846 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
15850 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
15851 if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
15853 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
15858 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
15860 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
15861 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
15862 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
15863 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
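// Worked example with hypothetical heaps, assuming the default macro values
// of VMA_SMALL_HEAP_MAX_SIZE = 1 GiB and VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE =
// 256 MiB: a 256 MiB heap counts as small and gets 256 MiB / 8 = 32 MiB
// blocks, while an 8 GiB heap gets the preferred 256 MiB blocks.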
15866 VkResult VmaAllocator_T::AllocateMemoryOfType(
15868 VkDeviceSize alignment,
15869 bool dedicatedAllocation,
15870 VkBuffer dedicatedBuffer,
15871 VkBufferUsageFlags dedicatedBufferUsage,
15872 VkImage dedicatedImage,
15874 uint32_t memTypeIndex,
15875 VmaSuballocationType suballocType,
15876 size_t allocationCount,
15879 VMA_ASSERT(pAllocations != VMA_NULL);
15880 VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
15886 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15896 VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
15897 VMA_ASSERT(blockVector);
15899 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
15900 bool preferDedicatedMemory =
15901 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
15902 dedicatedAllocation ||
15904 size > preferredBlockSize / 2;
15906 if(preferDedicatedMemory &&
15908 finalCreateInfo.pool == VK_NULL_HANDLE)
15917 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15921 return AllocateDedicatedMemory(
15930 dedicatedBufferUsage,
15938 VkResult res = blockVector->Allocate(
15939 m_CurrentFrameIndex.load(),
15946 if(res == VK_SUCCESS)
15954 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15958 res = AllocateDedicatedMemory(
15967 dedicatedBufferUsage,
15971 if(res == VK_SUCCESS)
15974 VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
15980 VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
15987 VkResult VmaAllocator_T::AllocateDedicatedMemory(
15989 VmaSuballocationType suballocType,
15990 uint32_t memTypeIndex,
15993 bool isUserDataString,
15995 VkBuffer dedicatedBuffer,
15996 VkBufferUsageFlags dedicatedBufferUsage,
15997 VkImage dedicatedImage,
15998 size_t allocationCount,
16001 VMA_ASSERT(allocationCount > 0 && pAllocations);
16005 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16007 GetBudget(&heapBudget, heapIndex, 1);
16008 if(heapBudget.usage + size * allocationCount > heapBudget.budget)
16010 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16014 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
16015 allocInfo.memoryTypeIndex = memTypeIndex;
16016 allocInfo.allocationSize = size;
16018 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16019 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
16020 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16022 if(dedicatedBuffer != VK_NULL_HANDLE)
16024 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
16025 dedicatedAllocInfo.buffer = dedicatedBuffer;
16026 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
16028 else if(dedicatedImage != VK_NULL_HANDLE)
16030 dedicatedAllocInfo.image = dedicatedImage;
16031 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
16034 #endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16036 #if VMA_BUFFER_DEVICE_ADDRESS
16037 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
16038 if(m_UseKhrBufferDeviceAddress)
16040 bool canContainBufferWithDeviceAddress = true;
16041 if(dedicatedBuffer != VK_NULL_HANDLE)
16043 canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX ||
16044 (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
16046 else if(dedicatedImage != VK_NULL_HANDLE)
16048 canContainBufferWithDeviceAddress = false;
16050 if(canContainBufferWithDeviceAddress)
16052 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
16053 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
16056 #endif // #if VMA_BUFFER_DEVICE_ADDRESS
16059 VkResult res = VK_SUCCESS;
16060 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16062 res = AllocateDedicatedMemoryPage(
16070 pAllocations + allocIndex);
16071 if(res != VK_SUCCESS)
16077 if(res == VK_SUCCESS)
16081 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
16082 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
16083 VMA_ASSERT(pDedicatedAllocations);
16084 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16086 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
16090 VMA_DEBUG_LOG(" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
16095 while(allocIndex--)
16098 VkDeviceMemory hMemory = currAlloc->GetMemory();
16110 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
16111 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
16112 currAlloc->SetUserData(this, VMA_NULL);
16113 m_AllocationObjectAllocator.Free(currAlloc);
16116 memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
16122 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
16124 VmaSuballocationType suballocType,
16125 uint32_t memTypeIndex,
16126 const VkMemoryAllocateInfo& allocInfo,
16128 bool isUserDataString,
16132 VkDeviceMemory hMemory = VK_NULL_HANDLE;
16133 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
16136 VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
16140 void* pMappedData = VMA_NULL;
16143 res = (*m_VulkanFunctions.vkMapMemory)(
16152 VMA_DEBUG_LOG(" vkMapMemory FAILED");
16153 FreeVulkanMemory(memTypeIndex, size, hMemory);
16158 *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
16159 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
16160 (*pAllocation)->SetUserData(this, pUserData);
16161 m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
16162 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
16164 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
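// GetBufferMemoryRequirements()/GetImageMemoryRequirements() below prefer the
// vkGet*MemoryRequirements2KHR path (Vulkan >= 1.1 or
// VK_KHR_dedicated_allocation) so the driver can report whether a dedicated
// allocation is required or preferred; the Vulkan 1.0 fallback reports both
// as false.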
16170 void VmaAllocator_T::GetBufferMemoryRequirements(
16172 VkMemoryRequirements& memReq,
16173 bool& requiresDedicatedAllocation,
16174 bool& prefersDedicatedAllocation) const
16176 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16177 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16179 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
16180 memReqInfo.buffer = hBuffer;
16182 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16184 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16185 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16187 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16189 memReq = memReq2.memoryRequirements;
16190 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16191 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
16194 #endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16196 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
16197 requiresDedicatedAllocation = false;
16198 prefersDedicatedAllocation = false;
16202 void VmaAllocator_T::GetImageMemoryRequirements(
16204 VkMemoryRequirements& memReq,
16205 bool& requiresDedicatedAllocation,
16206 bool& prefersDedicatedAllocation) const
16208 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16209 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16211 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
16212 memReqInfo.image = hImage;
16214 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16216 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16217 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16219 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16221 memReq = memReq2.memoryRequirements;
16222 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16223 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
16226 #endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16228 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
16229 requiresDedicatedAllocation = false;
16230 prefersDedicatedAllocation = false;
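// AllocateMemory() is the central entry point: after validating flag
// combinations it either delegates to the block vector of an explicit custom
// pool or walks the acceptable memory types, clearing each failed type's bit
// from memoryTypeBits and retrying with the next candidate until an
// allocation succeeds or no type remains.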
16234 VkResult VmaAllocator_T::AllocateMemory(
16235 const VkMemoryRequirements& vkMemReq,
16236 bool requiresDedicatedAllocation,
16237 bool prefersDedicatedAllocation,
16238 VkBuffer dedicatedBuffer,
16239 VkBufferUsageFlags dedicatedBufferUsage,
16240 VkImage dedicatedImage,
16242 VmaSuballocationType suballocType,
16243 size_t allocationCount,
16246 memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
16248 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
16250 if(vkMemReq.size == 0)
16252 return VK_ERROR_VALIDATION_FAILED_EXT;
16257 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
16258 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16263 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
16264 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16266 if(requiresDedicatedAllocation)
16270 VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
16271 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16273 if(createInfo.pool != VK_NULL_HANDLE)
16275 VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
16276 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16279 if((createInfo.pool != VK_NULL_HANDLE) &&
16282 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
16283 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16286 if(createInfo.pool != VK_NULL_HANDLE)
16288 const VkDeviceSize alignmentForPool = VMA_MAX(
16289 vkMemReq.alignment,
16290 GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));
16295 (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16300 return createInfo.pool->m_BlockVector.Allocate(
16301 m_CurrentFrameIndex.load(),
16312 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
16313 uint32_t memTypeIndex = UINT32_MAX;
16315 if(res == VK_SUCCESS)
16317 VkDeviceSize alignmentForMemType = VMA_MAX(
16318 vkMemReq.alignment,
16319 GetMemoryTypeMinAlignment(memTypeIndex));
16321 res = AllocateMemoryOfType(
16323 alignmentForMemType,
16324 requiresDedicatedAllocation || prefersDedicatedAllocation,
16326 dedicatedBufferUsage,
16334 if(res == VK_SUCCESS)
16344 memoryTypeBits &= ~(1u << memTypeIndex);
16347 if(res == VK_SUCCESS)
16349 alignmentForMemType = VMA_MAX(
16350 vkMemReq.alignment,
16351 GetMemoryTypeMinAlignment(memTypeIndex));
16353 res = AllocateMemoryOfType(
16355 alignmentForMemType,
16356 requiresDedicatedAllocation || prefersDedicatedAllocation,
16358 dedicatedBufferUsage,
16366 if(res == VK_SUCCESS)
16376 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16387 void VmaAllocator_T::FreeMemory(
16388 size_t allocationCount,
16391 VMA_ASSERT(pAllocations);
16393 for(size_t allocIndex = allocationCount; allocIndex--; )
16397 if(allocation != VK_NULL_HANDLE)
16399 if(TouchAllocation(allocation))
16401 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
16403 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
16406 switch(allocation->GetType())
16408 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
16410 VmaBlockVector* pBlockVector = VMA_NULL;
16411 VmaPool hPool = allocation->GetBlock()->GetParentPool();
16412 if(hPool != VK_NULL_HANDLE)
16414 pBlockVector = &hPool->m_BlockVector;
16418 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
16419 pBlockVector = m_pBlockVectors[memTypeIndex];
16421 pBlockVector->Free(allocation);
16424 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
16425 FreeDedicatedMemory(allocation);
16433 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
16434 allocation->SetUserData(
this, VMA_NULL);
16435 m_AllocationObjectAllocator.Free(allocation);
VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and so it does nothing. It's left for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // Have to take MIN with heap size because explicit HeapSizeLimit is included in it.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
        }
    }
}
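/*
Illustrative sketch (not part of the library): querying the budget before a
large allocation. `allocator`, `heapIndex`, and `requiredBytes` are the
caller's own.

    VmaBudget budget[VK_MAX_MEMORY_HEAPS] = {};
    vmaGetBudget(allocator, budget);
    if(budget[heapIndex].usage + requiredBytes <= budget[heapIndex].budget)
    {
        // Likely fits within the OS-reported budget (or the 80% heuristic above).
    }
*/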
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats, info.flags);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}

VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}

VkResult VmaAllocator_T::DefragmentationPassBegin(
    VmaDefragmentationPassInfo* pInfo,
    VmaDefragmentationContext context)
{
    return context->DefragmentPassBegin(pInfo);
}

VkResult VmaAllocator_T::DefragmentationPassEnd(
    VmaDefragmentationContext context)
{
    return context->DefragmentPassEnd();
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}
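/*
Illustrative sketch (not part of the library): the intended per-frame pattern
for allocations created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT.
`allocator`, `frameIndex`, and `alloc` are the caller's own.

    vmaSetCurrentFrameIndex(allocator, frameIndex);
    if(!vmaTouchAllocation(allocator, alloc))
    {
        // The allocation was lost: recreate the resource and its allocation.
    }
*/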
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    // Memory type index out of range or forbidden.
    if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() ||
        ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}
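/*
Illustrative sketch (not part of the library): creating a custom pool. The
block size and the way `memTypeIndex` was obtained are assumptions.

    VmaPoolCreateInfo poolCreateInfo = {};
    // memoryTypeIndex is typically found first with vmaFindMemoryTypeIndexForBufferInfo().
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 64ull * 1024 * 1024; // 64 MiB blocks.
    poolCreateInfo.minBlockCount = 1;               // CreateMinBlocks() above preallocates this many.

    VmaPool pool = VK_NULL_HANDLE;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
*/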
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
        (void)success;
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}

void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}
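/*
Illustrative sketch (not part of the library): corruption detection only
works when margins and corruption detection were compiled in, e.g.:

    #define VMA_DEBUG_MARGIN 16
    #define VMA_DEBUG_DETECT_CORRUPTION 1
    #include "vk_mem_alloc.h"

    // At runtime, validate the magic values around allocations in all memory types.
    // VK_ERROR_FEATURE_NOT_PRESENT means no checked memory type could be validated.
    VkResult res = vmaCheckCorruption(allocator, UINT32_MAX);
*/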
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData);
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
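/*
Illustrative sketch (not part of the library): the informative callbacks
invoked above can be installed at allocator creation. The logging functions
here are the caller's own; the callback signature shown matches the
VmaDeviceMemoryCallbacks usage in this version (with pUserData).

    void VKAPI_CALL OnAlloc(VmaAllocator allocator, uint32_t memoryType,
        VkDeviceMemory memory, VkDeviceSize size, void* pUserData) { /+ log +/ }
    void VKAPI_CALL OnFree(VmaAllocator allocator, uint32_t memoryType,
        VkDeviceMemory memory, VkDeviceSize size, void* pUserData) { /+ log +/ }

    VmaDeviceMemoryCallbacks memCb = {};
    memCb.pfnAllocate = OnAlloc;
    memCb.pfnFree = OnFree;

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.pDeviceMemoryCallbacks = &memCb;
*/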
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}
void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    VkResult res = VK_SUCCESS;

    VkMappedMemoryRange memRange = {};
    if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
    {
        switch(op)
        {
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
    return res;
}
VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets, const VkDeviceSize* sizes,
    VMA_CACHE_OPERATION op)
{
    typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
    typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
    RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));

    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation alloc = allocations[allocIndex];
        const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
        const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
        VkMappedMemoryRange newRange;
        if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
        {
            ranges.push_back(newRange);
        }
    }

    VkResult res = VK_SUCCESS;
    if(!ranges.empty())
    {
        switch(op)
        {
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
            break;
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
    return res;
}
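/*
Illustrative sketch (not part of the library): flushing several allocations
with a single Vulkan call, as implemented above. `allocA` and `allocB` are
the caller's allocations.

    VmaAllocation allocs[2] = { allocA, allocB };
    VkDeviceSize offsets[2] = { 0, 0 };
    VkDeviceSize sizes[2] = { VK_WHOLE_SIZE, VK_WHOLE_SIZE };
    VkResult res = vmaFlushAllocations(allocator, 2, allocs, offsets, sizes);
*/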
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    // There is no need to call vkUnmapMemory here, because the Vulkan spec
    // allows skipping it before vkFreeMemory.

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create buffer.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // Query for supported memory types.
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // Destroy buffer.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}

uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
{
    // Make sure memory information is already fetched.
    VMA_ASSERT(GetMemoryTypeCount() > 0);

    uint32_t memoryTypeBits = UINT32_MAX;

    if(!m_UseAmdDeviceCoherentMemory)
    {
        // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD.
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
            {
                memoryTypeBits &= ~(1u << memTypeIndex);
            }
        }
    }

    return memoryTypeBits;
}
bool VmaAllocator_T::GetFlushOrInvalidateRange(
    VmaAllocation allocation,
    VkDeviceSize offset, VkDeviceSize size,
    VkMappedMemoryRange& outRange) const
{
    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        const VkDeviceSize allocationSize = allocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
        outRange.pNext = VMA_NULL;
        outRange.memory = allocation->GetMemory();

        switch(allocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                outRange.size = allocationSize - outRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                outRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
                    allocationSize - outRange.offset);
            }
            break;
        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
            {
                // 1. Still within this allocation.
                outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
                if(size == VK_WHOLE_SIZE)
                {
                    size = allocationSize - offset;
                }
                else
                {
                    VMA_ASSERT(offset + size <= allocationSize);
                }
                outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);

                // 2. Adjust to whole block.
                const VkDeviceSize allocationOffset = allocation->GetOffset();
                VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
                const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
                outRange.offset += allocationOffset;
                outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);

                break;
            }
        default:
            VMA_ASSERT(0);
        }
        return true;
    }
    return false;
}
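/*
Worked example of the alignment math above, assuming
nonCoherentAtomSize = 64, offset = 100, size = 200, for a block allocation
placed at allocationOffset = 256 within its block:

    outRange.offset = VmaAlignDown(100, 64) = 64
    outRange.size   = VmaAlignUp(200 + (100 - 64), 64) = VmaAlignUp(236, 64) = 256
    // After shifting by the allocation's offset within the block:
    outRange.offset += 256           // -> 320
    outRange.size = min(256, blockSize - 320)
*/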
#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    VmaPnextChainPushFront(&memProps, &budgetProps);

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();

            // Some bugged drivers return budget incorrectly, e.g. 0 or much bigger than heap size.
            if(m_Budget.m_VulkanBudget[heapIndex] == 0)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
            }
            else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
            }
            if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
            {
                m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
            }
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET
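/*
Illustrative sketch (not part of the library): for m_UseExtMemoryBudget to be
true so the code above runs, enable the extension at allocator creation. The
device must support VK_EXT_memory_budget plus
VK_KHR_get_physical_device_properties2 or Vulkan 1.1.

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT;
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.instance = instance;

    VmaAllocator allocator = VK_NULL_HANDLE;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
*/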
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);

    VMA_DEBUG_LOG("vmaCreateAllocator");

    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo)
{
    VMA_ASSERT(allocator && pAllocatorInfo);
    pAllocatorInfo->instance = allocator->m_hInstance;
    pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice();
    pAllocatorInfo->device = allocator->m_hDevice;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();

            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
                    {
                        json.WriteString("PROTECTED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_UNCACHED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
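/*
Illustrative sketch (not part of the library): dumping the JSON produced
above to a file. The output path is hypothetical.

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = detailed map.
    FILE* file = fopen("vma_stats.json", "w");
    if(file != NULL)
    {
        fputs(statsString, file);
        fclose(file);
    }
    vmaFreeStatsString(allocator, statsString);
*/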
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    // Avoid DEVICE_COHERENT unless explicitly requested.
    if(((pAllocationCreateInfo->requiredFlags | pAllocationCreateInfo->preferredFlags) &
        (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
    {
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Calculate cost as number of bits from preferredFlags not present in this memory type.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
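/*
Illustrative sketch (not part of the library): the cost model above picks the
type missing the fewest preferred flags and carrying the fewest not-preferred
flags. For example, finding a type for a staging buffer:

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    // Per the switch above, requiredFlags becomes HOST_VISIBLE | HOST_COHERENT.

    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndex(allocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
*/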
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool && ppName);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(
    VmaAllocator allocator,
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets,
    const VkDeviceSize* sizes)
{
    VMA_ASSERT(allocator);

    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocations);

    VMA_DEBUG_LOG("vmaFlushAllocations");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        //TODO
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(
    VmaAllocator allocator,
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets,
    const VkDeviceSize* sizes)
{
    VMA_ASSERT(allocator);

    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocations);

    VMA_DEBUG_LOG("vmaInvalidateAllocations");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        //TODO
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using the new one.

    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // info2.flags, maxGpuAllocationsToMove, maxGpuBytesToMove, commandBuffer deliberately left zero.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    VMA_ASSERT(pInfo->allocationCount == 0 || pInfo->pAllocations != VMA_NULL);
    VMA_ASSERT(pInfo->poolCount == 0 || pInfo->pPools != VMA_NULL);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}
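/*
Illustrative sketch (not part of the library): CPU-side defragmentation of a
set of allocations. `allocs` and `allocCount` are the caller's own.

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = (uint32_t)allocCount;
    defragInfo.pAllocations = allocs;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
    vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
    // If GPU moves were requested via commandBuffer, submit and wait here.
    vmaDefragmentationEnd(allocator, defragCtx);
    // Buffers/images bound to moved allocations must be recreated and rebound.
*/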
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context,
    VmaDefragmentationPassInfo* pInfo)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(pInfo);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->moveCount, pInfo->pMoves));

    VMA_DEBUG_LOG("vmaBeginDefragmentationPass");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        pInfo->moveCount = 0;
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassBegin(pInfo, context);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaEndDefragmentationPass");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassEnd(context);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
        !allocator->m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            pBufferCreateInfo->usage, // dedicatedBufferUsage
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
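/*
Illustrative sketch (not part of the library): the common single-call path
that the function above implements (create + allocate + bind).

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buf = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, VMA_NULL);
    // ... use the buffer ...
    vmaDestroyBuffer(allocator, buf, alloc);
*/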
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            UINT32_MAX, // dedicatedBufferUsage
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
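/*
Illustrative sketch (not part of the library): note how the function above
chooses the suballocation type from tiling; OPTIMAL and LINEAR images are
tracked separately to respect bufferImageGranularity.

    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage image = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &image, &alloc, VMA_NULL);
*/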
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION