23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
2028 #ifndef VMA_RECORDING_ENABLED
2029 #define VMA_RECORDING_ENABLED 0
2032 #if !defined(NOMINMAX) && defined(VMA_IMPLEMENTATION)
2036 #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
2037 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
2038 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
2039 extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
2040 extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
2041 extern PFN_vkAllocateMemory vkAllocateMemory;
2042 extern PFN_vkFreeMemory vkFreeMemory;
2043 extern PFN_vkMapMemory vkMapMemory;
2044 extern PFN_vkUnmapMemory vkUnmapMemory;
2045 extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
2046 extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
2047 extern PFN_vkBindBufferMemory vkBindBufferMemory;
2048 extern PFN_vkBindImageMemory vkBindImageMemory;
2049 extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
2050 extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
2051 extern PFN_vkCreateBuffer vkCreateBuffer;
2052 extern PFN_vkDestroyBuffer vkDestroyBuffer;
2053 extern PFN_vkCreateImage vkCreateImage;
2054 extern PFN_vkDestroyImage vkDestroyImage;
2055 extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
2056 #if VMA_VULKAN_VERSION >= 1001000
2057 extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
2058 extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
2059 extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
2060 extern PFN_vkBindImageMemory2 vkBindImageMemory2;
2061 extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
2066 #include <vulkan/vulkan.h>
2072 #if !defined(VMA_VULKAN_VERSION)
2073 #if defined(VK_VERSION_1_2)
2074 #define VMA_VULKAN_VERSION 1002000
2075 #elif defined(VK_VERSION_1_1)
2076 #define VMA_VULKAN_VERSION 1001000
2078 #define VMA_VULKAN_VERSION 1000000
2082 #if !defined(VMA_DEDICATED_ALLOCATION)
2083 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
2084 #define VMA_DEDICATED_ALLOCATION 1
2086 #define VMA_DEDICATED_ALLOCATION 0
2090 #if !defined(VMA_BIND_MEMORY2)
2091 #if VK_KHR_bind_memory2
2092 #define VMA_BIND_MEMORY2 1
2094 #define VMA_BIND_MEMORY2 0
2098 #if !defined(VMA_MEMORY_BUDGET)
2099 #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
2100 #define VMA_MEMORY_BUDGET 1
2102 #define VMA_MEMORY_BUDGET 0
2107 #if !defined(VMA_BUFFER_DEVICE_ADDRESS)
2108 #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
2109 #define VMA_BUFFER_DEVICE_ADDRESS 1
2111 #define VMA_BUFFER_DEVICE_ADDRESS 0
2116 #if !defined(VMA_MEMORY_PRIORITY)
2117 #if VK_EXT_memory_priority
2118 #define VMA_MEMORY_PRIORITY 1
2120 #define VMA_MEMORY_PRIORITY 0
2129 #ifndef VMA_CALL_PRE
2130 #define VMA_CALL_PRE
2132 #ifndef VMA_CALL_POST
2133 #define VMA_CALL_POST
2147 #ifndef VMA_LEN_IF_NOT_NULL
2148 #define VMA_LEN_IF_NOT_NULL(len)
2153 #ifndef VMA_NULLABLE
2155 #define VMA_NULLABLE _Nullable
2157 #define VMA_NULLABLE
2163 #ifndef VMA_NOT_NULL
2165 #define VMA_NOT_NULL _Nonnull
2167 #define VMA_NOT_NULL
2173 #ifndef VMA_NOT_NULL_NON_DISPATCHABLE
2174 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2175 #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
2177 #define VMA_NOT_NULL_NON_DISPATCHABLE
2181 #ifndef VMA_NULLABLE_NON_DISPATCHABLE
2182 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2183 #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
2185 #define VMA_NULLABLE_NON_DISPATCHABLE
2203 uint32_t memoryType,
2204 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2206 void* VMA_NULLABLE pUserData);
2210 uint32_t memoryType,
2211 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2213 void* VMA_NULLABLE pUserData);
2370 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
2371 PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
2372 PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
2374 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
2375 PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
2376 PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
2378 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
2379 PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
2469 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(
"VkPhysicalDeviceMemoryProperties::memoryHeapCount")
pHeapSizeLimit;
2542 const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);
2550 const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);
2560 uint32_t memoryTypeIndex,
2561 VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);
2573 uint32_t frameIndex);
2669 #ifndef VMA_STATS_STRING_ENABLED
2670 #define VMA_STATS_STRING_ENABLED 1
2673 #if VMA_STATS_STRING_ENABLED
2680 char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
2681 VkBool32 detailedMap);
2685 char* VMA_NULLABLE pStatsString);
2946 uint32_t memoryTypeBits,
2948 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2964 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
2966 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2982 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
2984 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
3134 VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);
3162 size_t* VMA_NULLABLE pLostAllocationCount);
3189 const char* VMA_NULLABLE * VMA_NOT_NULL ppName);
3199 const char* VMA_NULLABLE pName);
3293 const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
3319 const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
3321 size_t allocationCount,
3322 VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3323 VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);
3333 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3341 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3366 size_t allocationCount,
3367 const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);
3424 void* VMA_NULLABLE pUserData);
3481 void* VMA_NULLABLE * VMA_NOT_NULL ppData);
3519 VkDeviceSize offset,
3546 VkDeviceSize offset,
3565 uint32_t allocationCount,
3566 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3567 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3568 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3586 uint32_t allocationCount,
3587 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3588 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3589 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3668 const VmaPool VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(poolCount)
pPools;
3702 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE
memory;
3840 const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3841 size_t allocationCount,
3842 VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
3861 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);
3876 VkDeviceSize allocationLocalOffset,
3877 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3878 const void* VMA_NULLABLE pNext);
3895 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);
3910 VkDeviceSize allocationLocalOffset,
3911 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3912 const void* VMA_NULLABLE pNext);
3946 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
3948 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
3965 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
3971 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
3973 VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
3990 VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
4000 #if defined(__cplusplus) && defined(__INTELLISENSE__)
4001 #define VMA_IMPLEMENTATION
4004 #ifdef VMA_IMPLEMENTATION
4005 #undef VMA_IMPLEMENTATION
4012 #if VMA_RECORDING_ENABLED
4015 #include <windows.h>
4035 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
4036 #define VMA_STATIC_VULKAN_FUNCTIONS 1
4045 #if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
4046 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
4047 #if defined(VK_NO_PROTOTYPES)
4048 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
4049 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
4062 #if VMA_USE_STL_CONTAINERS
4063 #define VMA_USE_STL_VECTOR 1
4064 #define VMA_USE_STL_UNORDERED_MAP 1
4065 #define VMA_USE_STL_LIST 1
4068 #ifndef VMA_USE_STL_SHARED_MUTEX
4070 #if __cplusplus >= 201703L
4071 #define VMA_USE_STL_SHARED_MUTEX 1
4075 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
4076 #define VMA_USE_STL_SHARED_MUTEX 1
4078 #define VMA_USE_STL_SHARED_MUTEX 0
4086 #if VMA_USE_STL_VECTOR
4090 #if VMA_USE_STL_UNORDERED_MAP
4091 #include <unordered_map>
4094 #if VMA_USE_STL_LIST
4103 #include <algorithm>
4108 #define VMA_NULL nullptr
4111 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
4113 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4116 if(alignment <
sizeof(
void*))
4118 alignment =
sizeof(
void*);
4121 return memalign(alignment, size);
4123 #elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
4126 #if defined(__APPLE__)
4127 #include <AvailabilityMacros.h>
4130 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4132 #if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0))
4133 #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0
4140 if (__builtin_available(macOS 10.15, iOS 13, *))
4141 return aligned_alloc(alignment, size);
4145 if(alignment <
sizeof(
void*))
4147 alignment =
sizeof(
void*);
4151 if(posix_memalign(&pointer, alignment, size) == 0)
4155 #elif defined(_WIN32)
4156 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4158 return _aligned_malloc(size, alignment);
4161 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4163 return aligned_alloc(alignment, size);
4168 static void vma_aligned_free(
void* ptr)
4173 static void vma_aligned_free(
void* VMA_NULLABLE ptr)
4187 #define VMA_ASSERT(expr)
4189 #define VMA_ASSERT(expr) assert(expr)
4195 #ifndef VMA_HEAVY_ASSERT
4197 #define VMA_HEAVY_ASSERT(expr)
4199 #define VMA_HEAVY_ASSERT(expr)
4203 #ifndef VMA_ALIGN_OF
4204 #define VMA_ALIGN_OF(type) (__alignof(type))
4207 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
4208 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
4211 #ifndef VMA_SYSTEM_ALIGNED_FREE
4213 #if defined(VMA_SYSTEM_FREE)
4214 #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr)
4216 #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr)
4221 #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
4225 #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
4229 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
4233 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
4236 #ifndef VMA_DEBUG_LOG
4237 #define VMA_DEBUG_LOG(format, ...)
4247 #if VMA_STATS_STRING_ENABLED
4248 static inline void VmaUint32ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint32_t num)
4250 snprintf(outStr, strLen,
"%u",
static_cast<unsigned int>(num));
4252 static inline void VmaUint64ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint64_t num)
4254 snprintf(outStr, strLen,
"%llu",
static_cast<unsigned long long>(num));
4256 static inline void VmaPtrToStr(
char* VMA_NOT_NULL outStr,
size_t strLen,
const void* ptr)
4258 snprintf(outStr, strLen,
"%p", ptr);
4266 void Lock() { m_Mutex.lock(); }
4267 void Unlock() { m_Mutex.unlock(); }
4268 bool TryLock() {
return m_Mutex.try_lock(); }
4272 #define VMA_MUTEX VmaMutex
4276 #ifndef VMA_RW_MUTEX
4277 #if VMA_USE_STL_SHARED_MUTEX
4279 #include <shared_mutex>
4283 void LockRead() { m_Mutex.lock_shared(); }
4284 void UnlockRead() { m_Mutex.unlock_shared(); }
4285 bool TryLockRead() {
return m_Mutex.try_lock_shared(); }
4286 void LockWrite() { m_Mutex.lock(); }
4287 void UnlockWrite() { m_Mutex.unlock(); }
4288 bool TryLockWrite() {
return m_Mutex.try_lock(); }
4290 std::shared_mutex m_Mutex;
4292 #define VMA_RW_MUTEX VmaRWMutex
4293 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
4299 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
4300 void LockRead() { AcquireSRWLockShared(&m_Lock); }
4301 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
4302 bool TryLockRead() {
return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
4303 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
4304 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
4305 bool TryLockWrite() {
return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
4309 #define VMA_RW_MUTEX VmaRWMutex
4315 void LockRead() { m_Mutex.Lock(); }
4316 void UnlockRead() { m_Mutex.Unlock(); }
4317 bool TryLockRead() {
return m_Mutex.TryLock(); }
4318 void LockWrite() { m_Mutex.Lock(); }
4319 void UnlockWrite() { m_Mutex.Unlock(); }
4320 bool TryLockWrite() {
return m_Mutex.TryLock(); }
4324 #define VMA_RW_MUTEX VmaRWMutex
4331 #ifndef VMA_ATOMIC_UINT32
4333 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
4336 #ifndef VMA_ATOMIC_UINT64
4338 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
4341 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
4346 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
4349 #ifndef VMA_DEBUG_ALIGNMENT
4354 #define VMA_DEBUG_ALIGNMENT (1)
4357 #ifndef VMA_DEBUG_MARGIN
4362 #define VMA_DEBUG_MARGIN (0)
4365 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
4370 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
4373 #ifndef VMA_DEBUG_DETECT_CORRUPTION
4379 #define VMA_DEBUG_DETECT_CORRUPTION (0)
4382 #ifndef VMA_DEBUG_GLOBAL_MUTEX
4387 #define VMA_DEBUG_GLOBAL_MUTEX (0)
4390 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
4395 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
4398 #ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
4403 #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0)
4406 #ifndef VMA_SMALL_HEAP_MAX_SIZE
4408 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
4411 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
4413 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
4416 #ifndef VMA_CLASS_NO_COPY
4417 #define VMA_CLASS_NO_COPY(className) \
4419 className(const className&) = delete; \
4420 className& operator=(const className&) = delete;
4423 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
4426 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
4428 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
4429 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
4437 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
4438 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
4439 static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
4441 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
4443 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
4444 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in the 32-bit value `v`.
// Classic SWAR parallel popcount: sums bit-pairs, then nibbles, bytes,
// half-words, and finally the whole word — no loop, no lookup table.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Returns true if `x` is a power of two.
// NOTE: also returns true for x == 0, matching the original implementation;
// callers pass known-nonzero alignments.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x - 1)) == 0;
}
// Aligns `val` up to the nearest multiple of `alignment`.
// `alignment` must be a power of two (checked only in heavy-assert builds);
// the bit-mask trick is valid only under that precondition.
template <typename T>
static inline T VmaAlignUp(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    return (val + alignment - 1) & ~(alignment - 1);
}
// Aligns `val` down to the nearest multiple of `alignment`.
// `alignment` must be a power of two (checked only in heavy-assert builds).
template <typename T>
static inline T VmaAlignDown(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    return val & ~(alignment - 1);
}
// Integer division with mathematical rounding to nearest (ties round up).
// Intended for unsigned or non-negative values: adds y/2 before dividing.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns the smallest power of two greater than or equal to `v`.
// NOTE(review): body reconstructed as the canonical bit-smearing idiom —
// for v == 0 the result wraps to 0; callers pass nonzero sizes.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
// 64-bit overload: smallest power of two greater than or equal to `v`.
// NOTE(review): body reconstructed as the canonical bit-smearing idiom.
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}
// Returns the largest power of two less than or equal to `v` (0 for v == 0).
// NOTE(review): body reconstructed — smears the top bit right, then keeps it.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
// 64-bit overload: largest power of two less than or equal to `v`.
// NOTE(review): body reconstructed — smears the top bit right, then keeps it.
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
4540 static inline bool VmaStrIsEmpty(
const char* pStr)
4542 return pStr == VMA_NULL || *pStr ==
'\0';
4545 #if VMA_STATS_STRING_ENABLED
4547 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto-style partition step for VmaQuickSort: uses the last element as the
// pivot, moves all elements for which cmp(elem, pivot) holds before it, and
// returns an iterator to the pivot's final position.
// NOTE(review): the `++insertIndex` / return statements were reconstructed.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot = last element
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
4590 template<
typename Iterator,
typename Compare>
4591 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
4595 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
4596 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
4597 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
4601 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
4612 static inline bool VmaBlocksOnSamePage(
4613 VkDeviceSize resourceAOffset,
4614 VkDeviceSize resourceASize,
4615 VkDeviceSize resourceBOffset,
4616 VkDeviceSize pageSize)
4618 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
4619 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
4620 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
4621 VkDeviceSize resourceBStart = resourceBOffset;
4622 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
4623 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation inside a memory block.
// The numeric order matters: VmaIsBufferImageGranularityConflict normalizes
// a pair of types by this ordering before switching on the smaller one.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused range
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // allocated without buffer/image usage info
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image with tiling not known
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // forces 32-bit underlying storage
};
4643 static inline bool VmaIsBufferImageGranularityConflict(
4644 VmaSuballocationType suballocType1,
4645 VmaSuballocationType suballocType2)
4647 if(suballocType1 > suballocType2)
4649 VMA_SWAP(suballocType1, suballocType2);
4652 switch(suballocType1)
4654 case VMA_SUBALLOCATION_TYPE_FREE:
4656 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
4658 case VMA_SUBALLOCATION_TYPE_BUFFER:
4660 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4661 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4662 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
4664 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4665 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
4666 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4667 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
4669 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4670 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
4678 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
4680 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4681 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
4682 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4683 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
4685 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
4692 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
4694 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4695 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
4696 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4697 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
4699 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
4712 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
4714 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
4715 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4716 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4717 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
4723 VMA_CLASS_NO_COPY(VmaMutexLock)
4725 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
4726 m_pMutex(useMutex ? &mutex : VMA_NULL)
4727 {
if(m_pMutex) { m_pMutex->Lock(); } }
4729 {
if(m_pMutex) { m_pMutex->Unlock(); } }
4731 VMA_MUTEX* m_pMutex;
4735 struct VmaMutexLockRead
4737 VMA_CLASS_NO_COPY(VmaMutexLockRead)
4739 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
4740 m_pMutex(useMutex ? &mutex : VMA_NULL)
4741 {
if(m_pMutex) { m_pMutex->LockRead(); } }
4742 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
4744 VMA_RW_MUTEX* m_pMutex;
4748 struct VmaMutexLockWrite
4750 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
4752 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
4753 m_pMutex(useMutex ? &mutex : VMA_NULL)
4754 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
4755 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
4757 VMA_RW_MUTEX* m_pMutex;
4760 #if VMA_DEBUG_GLOBAL_MUTEX
4761 static VMA_MUTEX gDebugGlobalMutex;
4762 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
4764 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
4768 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to the
// first element that is NOT less than `key` (i.e. std::lower_bound semantics),
// or `end` if every element is less.
// NOTE(review): loop-branch and return lines were reconstructed.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = down + (up - down) / 2; // overflow-safe midpoint
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
4798 template<
typename CmpLess,
typename IterT,
typename KeyT>
4799 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
4801 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4802 beg, end, value, cmp);
4804 (!cmp(*it, value) && !cmp(value, *it)))
4816 template<
typename T>
4817 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
4819 for(uint32_t i = 0; i < count; ++i)
4821 const T iPtr = arr[i];
4822 if(iPtr == VMA_NULL)
4826 for(uint32_t j = i + 1; j < count; ++j)
// Pushes `newStruct` to the front of `mainStruct`'s pNext extension chain.
// Both types must have a `pNext` member (Vulkan extension-struct convention).
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
4847 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
4849 void* result = VMA_NULL;
4850 if((pAllocationCallbacks != VMA_NULL) &&
4851 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
4853 result = (*pAllocationCallbacks->pfnAllocation)(
4854 pAllocationCallbacks->pUserData,
4857 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
4861 result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
4863 VMA_ASSERT(result != VMA_NULL &&
"CPU memory allocation failed.");
4867 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
4869 if((pAllocationCallbacks != VMA_NULL) &&
4870 (pAllocationCallbacks->pfnFree != VMA_NULL))
4872 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
4876 VMA_SYSTEM_ALIGNED_FREE(ptr);
4880 template<
typename T>
4881 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
4883 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
4886 template<
typename T>
4887 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
4889 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
4892 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
4894 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
4896 template<
typename T>
4897 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
4900 VmaFree(pAllocationCallbacks, ptr);
4903 template<
typename T>
4904 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
4908 for(
size_t i = count; i--; )
4912 VmaFree(pAllocationCallbacks, ptr);
4916 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr)
4918 if(srcStr != VMA_NULL)
4920 const size_t len = strlen(srcStr);
4921 char*
const result = vma_new_array(allocs,
char, len + 1);
4922 memcpy(result, srcStr, len + 1);
4931 static void VmaFreeString(
const VkAllocationCallbacks* allocs,
char* str)
4935 const size_t len = strlen(str);
4936 vma_delete_array(allocs, str, len + 1);
4941 template<
typename T>
4942 class VmaStlAllocator
4945 const VkAllocationCallbacks*
const m_pCallbacks;
4946 typedef T value_type;
4948 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
4949 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
4951 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
4952 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
4954 template<
typename U>
4955 bool operator==(
const VmaStlAllocator<U>& rhs)
const
4957 return m_pCallbacks == rhs.m_pCallbacks;
4959 template<
typename U>
4960 bool operator!=(
const VmaStlAllocator<U>& rhs)
const
4962 return m_pCallbacks != rhs.m_pCallbacks;
4965 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
4966 VmaStlAllocator(
const VmaStlAllocator&) =
default;
4969 #if VMA_USE_STL_VECTOR
4971 #define VmaVector std::vector
// Index-based insert helper so callers can use std::vector and the custom
// VmaVector interchangeably (VmaVector exposes insert-by-index natively).
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}
// Index-based erase helper, the std::vector counterpart of VmaVector::remove.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
4990 template<
typename T,
typename AllocatorT>
4994 typedef T value_type;
4996 VmaVector(
const AllocatorT& allocator) :
4997 m_Allocator(allocator),
5004 VmaVector(
size_t count,
const AllocatorT& allocator) :
5005 m_Allocator(allocator),
5006 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
5014 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator)
5015 : VmaVector(count, allocator) {}
5017 VmaVector(
const VmaVector<T, AllocatorT>& src) :
5018 m_Allocator(src.m_Allocator),
5019 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
5020 m_Count(src.m_Count),
5021 m_Capacity(src.m_Count)
5025 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
5031 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5034 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
5038 resize(rhs.m_Count);
5041 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
5047 bool empty()
const {
return m_Count == 0; }
5048 size_t size()
const {
return m_Count; }
5049 T* data() {
return m_pArray; }
5050 const T* data()
const {
return m_pArray; }
5052 T& operator[](
size_t index)
5054 VMA_HEAVY_ASSERT(index < m_Count);
5055 return m_pArray[index];
5057 const T& operator[](
size_t index)
const
5059 VMA_HEAVY_ASSERT(index < m_Count);
5060 return m_pArray[index];
5065 VMA_HEAVY_ASSERT(m_Count > 0);
5068 const T& front()
const
5070 VMA_HEAVY_ASSERT(m_Count > 0);
5075 VMA_HEAVY_ASSERT(m_Count > 0);
5076 return m_pArray[m_Count - 1];
5078 const T& back()
const
5080 VMA_HEAVY_ASSERT(m_Count > 0);
5081 return m_pArray[m_Count - 1];
5084 void reserve(
size_t newCapacity,
bool freeMemory =
false)
5086 newCapacity = VMA_MAX(newCapacity, m_Count);
5088 if((newCapacity < m_Capacity) && !freeMemory)
5090 newCapacity = m_Capacity;
5093 if(newCapacity != m_Capacity)
5095 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
5098 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5100 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5101 m_Capacity = newCapacity;
5102 m_pArray = newArray;
5106 void resize(
size_t newCount)
5108 size_t newCapacity = m_Capacity;
5109 if(newCount > m_Capacity)
5111 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
5114 if(newCapacity != m_Capacity)
5116 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
5117 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
5118 if(elementsToCopy != 0)
5120 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
5122 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5123 m_Capacity = newCapacity;
5124 m_pArray = newArray;
5135 void shrink_to_fit()
5137 if(m_Capacity > m_Count)
5139 T* newArray = VMA_NULL;
5142 newArray = VmaAllocateArray<T>(m_Allocator.m_pCallbacks, m_Count);
5143 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5145 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5146 m_Capacity = m_Count;
5147 m_pArray = newArray;
5151 void insert(
size_t index,
const T& src)
5153 VMA_HEAVY_ASSERT(index <= m_Count);
5154 const size_t oldCount = size();
5155 resize(oldCount + 1);
5156 if(index < oldCount)
5158 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
5160 m_pArray[index] = src;
5163 void remove(
size_t index)
5165 VMA_HEAVY_ASSERT(index < m_Count);
5166 const size_t oldCount = size();
5167 if(index < oldCount - 1)
5169 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
5171 resize(oldCount - 1);
5174 void push_back(
const T& src)
5176 const size_t newIndex = size();
5177 resize(newIndex + 1);
5178 m_pArray[newIndex] = src;
5183 VMA_HEAVY_ASSERT(m_Count > 0);
5187 void push_front(
const T& src)
5194 VMA_HEAVY_ASSERT(m_Count > 0);
5198 typedef T* iterator;
5200 iterator begin() {
return m_pArray; }
5201 iterator end() {
return m_pArray + m_Count; }
5204 AllocatorT m_Allocator;
5210 template<
typename T,
typename allocatorT>
5211 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
5213 vec.insert(index, item);
5216 template<
typename T,
typename allocatorT>
5217 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
// Inserts `value` into a sorted vector, keeping it sorted under CmpLess.
// Returns the index at which the value was inserted.
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}
// Removes one element equivalent to `value` from a sorted vector.
// Returns true if an element was found and removed, false otherwise.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
5265 template<
typename T,
typename AllocatorT,
size_t N>
5266 class VmaSmallVector
5269 typedef T value_type;
5271 VmaSmallVector(
const AllocatorT& allocator) :
5273 m_DynamicArray(allocator)
5276 VmaSmallVector(
size_t count,
const AllocatorT& allocator) :
5278 m_DynamicArray(count > N ? count : 0, allocator)
5281 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5282 VmaSmallVector(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) =
delete;
5283 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5284 VmaSmallVector<T, AllocatorT, N>& operator=(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) =
delete;
5286 bool empty()
const {
return m_Count == 0; }
5287 size_t size()
const {
return m_Count; }
5288 T* data() {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5289 const T* data()
const {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5291 T& operator[](
size_t index)
5293 VMA_HEAVY_ASSERT(index < m_Count);
5294 return data()[index];
5296 const T& operator[](
size_t index)
const
5298 VMA_HEAVY_ASSERT(index < m_Count);
5299 return data()[index];
5304 VMA_HEAVY_ASSERT(m_Count > 0);
5307 const T& front()
const
5309 VMA_HEAVY_ASSERT(m_Count > 0);
5314 VMA_HEAVY_ASSERT(m_Count > 0);
5315 return data()[m_Count - 1];
5317 const T& back()
const
5319 VMA_HEAVY_ASSERT(m_Count > 0);
5320 return data()[m_Count - 1];
5323 void resize(
size_t newCount,
bool freeMemory =
false)
5325 if(newCount > N && m_Count > N)
5328 m_DynamicArray.resize(newCount);
5331 m_DynamicArray.shrink_to_fit();
5334 else if(newCount > N && m_Count <= N)
5337 m_DynamicArray.resize(newCount);
5340 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count *
sizeof(T));
5343 else if(newCount <= N && m_Count > N)
5348 memcpy(m_StaticArray, m_DynamicArray.data(), newCount *
sizeof(T));
5350 m_DynamicArray.resize(0);
5353 m_DynamicArray.shrink_to_fit();
5363 void clear(
bool freeMemory =
false)
5365 m_DynamicArray.clear();
5368 m_DynamicArray.shrink_to_fit();
5373 void insert(
size_t index,
const T& src)
5375 VMA_HEAVY_ASSERT(index <= m_Count);
5376 const size_t oldCount = size();
5377 resize(oldCount + 1);
5378 T*
const dataPtr = data();
5379 if(index < oldCount)
5382 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) *
sizeof(T));
5384 dataPtr[index] = src;
5387 void remove(
size_t index)
5389 VMA_HEAVY_ASSERT(index < m_Count);
5390 const size_t oldCount = size();
5391 if(index < oldCount - 1)
5394 T*
const dataPtr = data();
5395 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) *
sizeof(T));
5397 resize(oldCount - 1);
5400 void push_back(
const T& src)
5402 const size_t newIndex = size();
5403 resize(newIndex + 1);
5404 data()[newIndex] = src;
5409 VMA_HEAVY_ASSERT(m_Count > 0);
5413 void push_front(
const T& src)
5420 VMA_HEAVY_ASSERT(m_Count > 0);
5424 typedef T* iterator;
5426 iterator begin() {
return data(); }
5427 iterator end() {
return data() + m_Count; }
5432 VmaVector<T, AllocatorT> m_DynamicArray;
5443 template<
typename T>
5444 class VmaPoolAllocator
5446 VMA_CLASS_NO_COPY(VmaPoolAllocator)
5448 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
5449 ~VmaPoolAllocator();
5450 template<
typename... Types> T* Alloc(Types... args);
5456 uint32_t NextFreeIndex;
5457 alignas(T)
char Value[
sizeof(T)];
5464 uint32_t FirstFreeIndex;
5467 const VkAllocationCallbacks* m_pAllocationCallbacks;
5468 const uint32_t m_FirstBlockCapacity;
5469 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
5471 ItemBlock& CreateNewBlock();
5474 template<
typename T>
5475 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
5476 m_pAllocationCallbacks(pAllocationCallbacks),
5477 m_FirstBlockCapacity(firstBlockCapacity),
5478 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
5480 VMA_ASSERT(m_FirstBlockCapacity > 1);
5483 template<
typename T>
5484 VmaPoolAllocator<T>::~VmaPoolAllocator()
5486 for(
size_t i = m_ItemBlocks.size(); i--; )
5487 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
5488 m_ItemBlocks.clear();
5491 template<
typename T>
5492 template<
typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
5494 for(
size_t i = m_ItemBlocks.size(); i--; )
5496 ItemBlock& block = m_ItemBlocks[i];
5498 if(block.FirstFreeIndex != UINT32_MAX)
5500 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
5501 block.FirstFreeIndex = pItem->NextFreeIndex;
5502 T* result = (T*)&pItem->Value;
5503 new(result)T(std::forward<Types>(args)...);
5509 ItemBlock& newBlock = CreateNewBlock();
5510 Item*
const pItem = &newBlock.pItems[0];
5511 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
5512 T* result = (T*)&pItem->Value;
5513 new(result)T(std::forward<Types>(args)...);
5517 template<
typename T>
5518 void VmaPoolAllocator<T>::Free(T* ptr)
5521 for(
size_t i = m_ItemBlocks.size(); i--; )
5523 ItemBlock& block = m_ItemBlocks[i];
5527 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
5530 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
5533 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
5534 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
5535 block.FirstFreeIndex = index;
5539 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
5542 template<
typename T>
5543 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
5545 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
5546 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
5548 const ItemBlock newBlock = {
5549 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
5553 m_ItemBlocks.push_back(newBlock);
5556 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
5557 newBlock.pItems[i].NextFreeIndex = i + 1;
5558 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
5559 return m_ItemBlocks.back();
5565 #if VMA_USE_STL_LIST
5567 #define VmaList std::list
5571 template<
typename T>
5580 template<
typename T>
5583 VMA_CLASS_NO_COPY(VmaRawList)
5585 typedef VmaListItem<T> ItemType;
5587 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
5591 size_t GetCount()
const {
return m_Count; }
5592 bool IsEmpty()
const {
return m_Count == 0; }
5594 ItemType* Front() {
return m_pFront; }
5595 const ItemType* Front()
const {
return m_pFront; }
5596 ItemType* Back() {
return m_pBack; }
5597 const ItemType* Back()
const {
return m_pBack; }
5599 ItemType* PushBack();
5600 ItemType* PushFront();
5601 ItemType* PushBack(
const T& value);
5602 ItemType* PushFront(
const T& value);
5607 ItemType* InsertBefore(ItemType* pItem);
5609 ItemType* InsertAfter(ItemType* pItem);
5611 ItemType* InsertBefore(ItemType* pItem,
const T& value);
5612 ItemType* InsertAfter(ItemType* pItem,
const T& value);
5614 void Remove(ItemType* pItem);
5617 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
5618 VmaPoolAllocator<ItemType> m_ItemAllocator;
5624 template<
typename T>
5625 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
5626 m_pAllocationCallbacks(pAllocationCallbacks),
5627 m_ItemAllocator(pAllocationCallbacks, 128),
5634 template<
typename T>
5635 VmaRawList<T>::~VmaRawList()
5641 template<
typename T>
5642 void VmaRawList<T>::Clear()
5644 if(IsEmpty() ==
false)
5646 ItemType* pItem = m_pBack;
5647 while(pItem != VMA_NULL)
5649 ItemType*
const pPrevItem = pItem->pPrev;
5650 m_ItemAllocator.Free(pItem);
5653 m_pFront = VMA_NULL;
5659 template<
typename T>
5660 VmaListItem<T>* VmaRawList<T>::PushBack()
5662 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5663 pNewItem->pNext = VMA_NULL;
5666 pNewItem->pPrev = VMA_NULL;
5667 m_pFront = pNewItem;
5673 pNewItem->pPrev = m_pBack;
5674 m_pBack->pNext = pNewItem;
5681 template<
typename T>
5682 VmaListItem<T>* VmaRawList<T>::PushFront()
5684 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5685 pNewItem->pPrev = VMA_NULL;
5688 pNewItem->pNext = VMA_NULL;
5689 m_pFront = pNewItem;
5695 pNewItem->pNext = m_pFront;
5696 m_pFront->pPrev = pNewItem;
5697 m_pFront = pNewItem;
5703 template<
typename T>
5704 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
5706 ItemType*
const pNewItem = PushBack();
5707 pNewItem->Value = value;
5711 template<
typename T>
5712 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
5714 ItemType*
const pNewItem = PushFront();
5715 pNewItem->Value = value;
5719 template<
typename T>
5720 void VmaRawList<T>::PopBack()
5722 VMA_HEAVY_ASSERT(m_Count > 0);
5723 ItemType*
const pBackItem = m_pBack;
5724 ItemType*
const pPrevItem = pBackItem->pPrev;
5725 if(pPrevItem != VMA_NULL)
5727 pPrevItem->pNext = VMA_NULL;
5729 m_pBack = pPrevItem;
5730 m_ItemAllocator.Free(pBackItem);
5734 template<
typename T>
5735 void VmaRawList<T>::PopFront()
5737 VMA_HEAVY_ASSERT(m_Count > 0);
5738 ItemType*
const pFrontItem = m_pFront;
5739 ItemType*
const pNextItem = pFrontItem->pNext;
5740 if(pNextItem != VMA_NULL)
5742 pNextItem->pPrev = VMA_NULL;
5744 m_pFront = pNextItem;
5745 m_ItemAllocator.Free(pFrontItem);
5749 template<
typename T>
5750 void VmaRawList<T>::Remove(ItemType* pItem)
5752 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
5753 VMA_HEAVY_ASSERT(m_Count > 0);
5755 if(pItem->pPrev != VMA_NULL)
5757 pItem->pPrev->pNext = pItem->pNext;
5761 VMA_HEAVY_ASSERT(m_pFront == pItem);
5762 m_pFront = pItem->pNext;
5765 if(pItem->pNext != VMA_NULL)
5767 pItem->pNext->pPrev = pItem->pPrev;
5771 VMA_HEAVY_ASSERT(m_pBack == pItem);
5772 m_pBack = pItem->pPrev;
5775 m_ItemAllocator.Free(pItem);
5779 template<
typename T>
5780 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
5782 if(pItem != VMA_NULL)
5784 ItemType*
const prevItem = pItem->pPrev;
5785 ItemType*
const newItem = m_ItemAllocator.Alloc();
5786 newItem->pPrev = prevItem;
5787 newItem->pNext = pItem;
5788 pItem->pPrev = newItem;
5789 if(prevItem != VMA_NULL)
5791 prevItem->pNext = newItem;
5795 VMA_HEAVY_ASSERT(m_pFront == pItem);
5805 template<
typename T>
5806 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
5808 if(pItem != VMA_NULL)
5810 ItemType*
const nextItem = pItem->pNext;
5811 ItemType*
const newItem = m_ItemAllocator.Alloc();
5812 newItem->pNext = nextItem;
5813 newItem->pPrev = pItem;
5814 pItem->pNext = newItem;
5815 if(nextItem != VMA_NULL)
5817 nextItem->pPrev = newItem;
5821 VMA_HEAVY_ASSERT(m_pBack == pItem);
5831 template<
typename T>
5832 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
5834 ItemType*
const newItem = InsertBefore(pItem);
5835 newItem->Value = value;
5839 template<
typename T>
5840 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
5842 ItemType*
const newItem = InsertAfter(pItem);
5843 newItem->Value = value;
5847 template<
typename T,
typename AllocatorT>
5850 VMA_CLASS_NO_COPY(VmaList)
5861 T& operator*()
const
5863 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5864 return m_pItem->Value;
5866 T* operator->()
const
5868 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5869 return &m_pItem->Value;
5872 iterator& operator++()
5874 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5875 m_pItem = m_pItem->pNext;
5878 iterator& operator--()
5880 if(m_pItem != VMA_NULL)
5882 m_pItem = m_pItem->pPrev;
5886 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5887 m_pItem = m_pList->Back();
5892 iterator operator++(
int)
5894 iterator result = *
this;
5898 iterator operator--(
int)
5900 iterator result = *
this;
5905 bool operator==(
const iterator& rhs)
const
5907 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5908 return m_pItem == rhs.m_pItem;
5910 bool operator!=(
const iterator& rhs)
const
5912 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5913 return m_pItem != rhs.m_pItem;
5917 VmaRawList<T>* m_pList;
5918 VmaListItem<T>* m_pItem;
5920 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
5926 friend class VmaList<T, AllocatorT>;
5929 class const_iterator
5938 const_iterator(
const iterator& src) :
5939 m_pList(src.m_pList),
5940 m_pItem(src.m_pItem)
5944 const T& operator*()
const
5946 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5947 return m_pItem->Value;
5949 const T* operator->()
const
5951 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5952 return &m_pItem->Value;
5955 const_iterator& operator++()
5957 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5958 m_pItem = m_pItem->pNext;
5961 const_iterator& operator--()
5963 if(m_pItem != VMA_NULL)
5965 m_pItem = m_pItem->pPrev;
5969 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5970 m_pItem = m_pList->Back();
5975 const_iterator operator++(
int)
5977 const_iterator result = *
this;
5981 const_iterator operator--(
int)
5983 const_iterator result = *
this;
5988 bool operator==(
const const_iterator& rhs)
const
5990 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5991 return m_pItem == rhs.m_pItem;
5993 bool operator!=(
const const_iterator& rhs)
const
5995 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5996 return m_pItem != rhs.m_pItem;
6000 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
6006 const VmaRawList<T>* m_pList;
6007 const VmaListItem<T>* m_pItem;
6009 friend class VmaList<T, AllocatorT>;
6012 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
6014 bool empty()
const {
return m_RawList.IsEmpty(); }
6015 size_t size()
const {
return m_RawList.GetCount(); }
6017 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
6018 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
6020 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
6021 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
6023 void clear() { m_RawList.Clear(); }
6024 void push_back(
const T& value) { m_RawList.PushBack(value); }
6025 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
6026 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
6029 VmaRawList<T> m_RawList;
6048 template<
typename ItemTypeTraits>
6049 class VmaIntrusiveLinkedList
6052 typedef typename ItemTypeTraits::ItemType ItemType;
6053 static ItemType* GetPrev(
const ItemType* item) {
return ItemTypeTraits::GetPrev(item); }
6054 static ItemType* GetNext(
const ItemType* item) {
return ItemTypeTraits::GetNext(item); }
6056 VmaIntrusiveLinkedList() { }
6057 VmaIntrusiveLinkedList(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
6058 VmaIntrusiveLinkedList(VmaIntrusiveLinkedList<ItemTypeTraits>&& src) :
6059 m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count)
6061 src.m_Front = src.m_Back = VMA_NULL;
6064 ~VmaIntrusiveLinkedList()
6066 VMA_HEAVY_ASSERT(IsEmpty());
6068 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
6069 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(VmaIntrusiveLinkedList<ItemTypeTraits>&& src)
6073 VMA_HEAVY_ASSERT(IsEmpty());
6074 m_Front = src.m_Front;
6075 m_Back = src.m_Back;
6076 m_Count = src.m_Count;
6077 src.m_Front = src.m_Back = VMA_NULL;
6086 ItemType* item = m_Back;
6087 while(item != VMA_NULL)
6089 ItemType*
const prevItem = ItemTypeTraits::AccessPrev(item);
6090 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
6091 ItemTypeTraits::AccessNext(item) = VMA_NULL;
6099 size_t GetCount()
const {
return m_Count; }
6100 bool IsEmpty()
const {
return m_Count == 0; }
6101 ItemType* Front() {
return m_Front; }
6102 const ItemType* Front()
const {
return m_Front; }
6103 ItemType* Back() {
return m_Back; }
6104 const ItemType* Back()
const {
return m_Back; }
6105 void PushBack(ItemType* item)
6107 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
6116 ItemTypeTraits::AccessPrev(item) = m_Back;
6117 ItemTypeTraits::AccessNext(m_Back) = item;
6122 void PushFront(ItemType* item)
6124 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
6133 ItemTypeTraits::AccessNext(item) = m_Front;
6134 ItemTypeTraits::AccessPrev(m_Front) = item;
6141 VMA_HEAVY_ASSERT(m_Count > 0);
6142 ItemType*
const backItem = m_Back;
6143 ItemType*
const prevItem = ItemTypeTraits::GetPrev(backItem);
6144 if(prevItem != VMA_NULL)
6146 ItemTypeTraits::AccessNext(prevItem) = VMA_NULL;
6150 ItemTypeTraits::AccessPrev(backItem) = VMA_NULL;
6151 ItemTypeTraits::AccessNext(backItem) = VMA_NULL;
6154 ItemType* PopFront()
6156 VMA_HEAVY_ASSERT(m_Count > 0);
6157 ItemType*
const frontItem = m_Front;
6158 ItemType*
const nextItem = ItemTypeTraits::GetNext(frontItem);
6159 if(nextItem != VMA_NULL)
6161 ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL;
6165 ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL;
6166 ItemTypeTraits::AccessNext(frontItem) = VMA_NULL;
6171 void InsertBefore(ItemType* existingItem, ItemType* newItem)
6173 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
6174 if(existingItem != VMA_NULL)
6176 ItemType*
const prevItem = ItemTypeTraits::GetPrev(existingItem);
6177 ItemTypeTraits::AccessPrev(newItem) = prevItem;
6178 ItemTypeTraits::AccessNext(newItem) = existingItem;
6179 ItemTypeTraits::AccessPrev(existingItem) = newItem;
6180 if(prevItem != VMA_NULL)
6182 ItemTypeTraits::AccessNext(prevItem) = newItem;
6186 VMA_HEAVY_ASSERT(m_Front == existingItem);
6195 void InsertAfter(ItemType* existingItem, ItemType* newItem)
6197 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
6198 if(existingItem != VMA_NULL)
6200 ItemType*
const nextItem = ItemTypeTraits::GetNext(existingItem);
6201 ItemTypeTraits::AccessNext(newItem) = nextItem;
6202 ItemTypeTraits::AccessPrev(newItem) = existingItem;
6203 ItemTypeTraits::AccessNext(existingItem) = newItem;
6204 if(nextItem != VMA_NULL)
6206 ItemTypeTraits::AccessPrev(nextItem) = newItem;
6210 VMA_HEAVY_ASSERT(m_Back == existingItem);
6216 return PushFront(newItem);
6218 void Remove(ItemType* item)
6220 VMA_HEAVY_ASSERT(item != VMA_NULL && m_Count > 0);
6221 if(ItemTypeTraits::GetPrev(item) != VMA_NULL)
6223 ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item);
6227 VMA_HEAVY_ASSERT(m_Front == item);
6228 m_Front = ItemTypeTraits::GetNext(item);
6231 if(ItemTypeTraits::GetNext(item) != VMA_NULL)
6233 ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item);
6237 VMA_HEAVY_ASSERT(m_Back == item);
6238 m_Back = ItemTypeTraits::GetPrev(item);
6240 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
6241 ItemTypeTraits::AccessNext(item) = VMA_NULL;
6245 ItemType* m_Front = VMA_NULL;
6246 ItemType* m_Back = VMA_NULL;
6256 #if VMA_USE_STL_UNORDERED_MAP
6258 #define VmaPair std::pair
6260 #define VMA_MAP_TYPE(KeyT, ValueT) \
6261 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
6265 template<
typename T1,
typename T2>
6271 VmaPair() : first(), second() { }
6272 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
6278 template<
typename KeyT,
typename ValueT>
6282 typedef VmaPair<KeyT, ValueT> PairType;
6283 typedef PairType* iterator;
6285 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
6287 iterator begin() {
return m_Vector.begin(); }
6288 iterator end() {
return m_Vector.end(); }
6290 void insert(
const PairType& pair);
6291 iterator find(
const KeyT& key);
6292 void erase(iterator it);
6295 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
6298 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
6300 template<
typename FirstT,
typename SecondT>
6301 struct VmaPairFirstLess
6303 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const
6305 return lhs.first < rhs.first;
6307 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const
6309 return lhs.first < rhsFirst;
6313 template<
typename KeyT,
typename ValueT>
6314 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
6316 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
6318 m_Vector.data() + m_Vector.size(),
6320 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
6321 VmaVectorInsert(m_Vector, indexToInsert, pair);
6324 template<
typename KeyT,
typename ValueT>
6325 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
6327 PairType* it = VmaBinaryFindFirstNotLess(
6329 m_Vector.data() + m_Vector.size(),
6331 VmaPairFirstLess<KeyT, ValueT>());
6332 if((it != m_Vector.end()) && (it->first == key))
6338 return m_Vector.end();
6342 template<
typename KeyT,
typename ValueT>
6343 void VmaMap<KeyT, ValueT>::erase(iterator it)
6345 VmaVectorRemove(m_Vector, it - m_Vector.begin());
6354 class VmaDeviceMemoryBlock;
6356 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
6358 struct VmaAllocation_T
6361 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
6365 FLAG_USER_DATA_STRING = 0x01,
6369 enum ALLOCATION_TYPE
6371 ALLOCATION_TYPE_NONE,
6372 ALLOCATION_TYPE_BLOCK,
6373 ALLOCATION_TYPE_DEDICATED,
6380 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
6383 m_pUserData{VMA_NULL},
6384 m_LastUseFrameIndex{currentFrameIndex},
6385 m_MemoryTypeIndex{0},
6386 m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
6387 m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
6389 m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
6391 #if VMA_STATS_STRING_ENABLED
6392 m_CreationFrameIndex = currentFrameIndex;
6393 m_BufferImageUsage = 0;
6399 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
6402 VMA_ASSERT(m_pUserData == VMA_NULL);
6405 void InitBlockAllocation(
6406 VmaDeviceMemoryBlock* block,
6407 VkDeviceSize offset,
6408 VkDeviceSize alignment,
6410 uint32_t memoryTypeIndex,
6411 VmaSuballocationType suballocationType,
6415 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6416 VMA_ASSERT(block != VMA_NULL);
6417 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6418 m_Alignment = alignment;
6420 m_MemoryTypeIndex = memoryTypeIndex;
6421 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6422 m_SuballocationType = (uint8_t)suballocationType;
6423 m_BlockAllocation.m_Block = block;
6424 m_BlockAllocation.m_Offset = offset;
6425 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
6430 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6431 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
6432 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6433 m_MemoryTypeIndex = 0;
6434 m_BlockAllocation.m_Block = VMA_NULL;
6435 m_BlockAllocation.m_Offset = 0;
6436 m_BlockAllocation.m_CanBecomeLost =
true;
6439 void ChangeBlockAllocation(
6441 VmaDeviceMemoryBlock* block,
6442 VkDeviceSize offset);
6444 void ChangeOffset(VkDeviceSize newOffset);
6447 void InitDedicatedAllocation(
6448 uint32_t memoryTypeIndex,
6449 VkDeviceMemory hMemory,
6450 VmaSuballocationType suballocationType,
6454 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6455 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
6456 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
6459 m_MemoryTypeIndex = memoryTypeIndex;
6460 m_SuballocationType = (uint8_t)suballocationType;
6461 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6462 m_DedicatedAllocation.m_hMemory = hMemory;
6463 m_DedicatedAllocation.m_pMappedData = pMappedData;
6464 m_DedicatedAllocation.m_Prev = VMA_NULL;
6465 m_DedicatedAllocation.m_Next = VMA_NULL;
6468 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
6469 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
6470 VkDeviceSize GetSize()
const {
return m_Size; }
6471 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
6472 void* GetUserData()
const {
return m_pUserData; }
6473 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
6474 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
6476 VmaDeviceMemoryBlock* GetBlock()
const
6478 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6479 return m_BlockAllocation.m_Block;
6481 VkDeviceSize GetOffset()
const;
6482 VkDeviceMemory GetMemory()
const;
6483 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
6484 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
6485 void* GetMappedData()
const;
6486 bool CanBecomeLost()
const;
6488 uint32_t GetLastUseFrameIndex()
const
6490 return m_LastUseFrameIndex.load();
6492 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
6494 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
6504 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6506 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
6508 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
6519 void BlockAllocMap();
6520 void BlockAllocUnmap();
6521 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
6524 #if VMA_STATS_STRING_ENABLED
6525 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
6526 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
6528 void InitBufferImageUsage(uint32_t bufferImageUsage)
6530 VMA_ASSERT(m_BufferImageUsage == 0);
6531 m_BufferImageUsage = bufferImageUsage;
6534 void PrintParameters(
class VmaJsonWriter& json)
const;
6538 VkDeviceSize m_Alignment;
6539 VkDeviceSize m_Size;
6541 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
6542 uint32_t m_MemoryTypeIndex;
6544 uint8_t m_SuballocationType;
6551 struct BlockAllocation
6553 VmaDeviceMemoryBlock* m_Block;
6554 VkDeviceSize m_Offset;
6555 bool m_CanBecomeLost;
6559 struct DedicatedAllocation
6561 VkDeviceMemory m_hMemory;
6562 void* m_pMappedData;
6563 VmaAllocation_T* m_Prev;
6564 VmaAllocation_T* m_Next;
6570 BlockAllocation m_BlockAllocation;
6572 DedicatedAllocation m_DedicatedAllocation;
6575 #if VMA_STATS_STRING_ENABLED
6576 uint32_t m_CreationFrameIndex;
6577 uint32_t m_BufferImageUsage;
6582 friend struct VmaDedicatedAllocationListItemTraits;
6585 struct VmaDedicatedAllocationListItemTraits
6587 typedef VmaAllocation_T ItemType;
6588 static ItemType* GetPrev(
const ItemType* item)
6590 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6591 return item->m_DedicatedAllocation.m_Prev;
6593 static ItemType* GetNext(
const ItemType* item)
6595 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6596 return item->m_DedicatedAllocation.m_Next;
6598 static ItemType*& AccessPrev(ItemType* item)
6600 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6601 return item->m_DedicatedAllocation.m_Prev;
6603 static ItemType*& AccessNext(ItemType* item){
6604 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6605 return item->m_DedicatedAllocation.m_Next;
6613 struct VmaSuballocation
6615 VkDeviceSize offset;
6618 VmaSuballocationType type;
6622 struct VmaSuballocationOffsetLess
6624 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6626 return lhs.offset < rhs.offset;
6629 struct VmaSuballocationOffsetGreater
6631 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6633 return lhs.offset > rhs.offset;
6637 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
6640 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
6642 enum class VmaAllocationRequestType
6664 struct VmaAllocationRequest
6666 VkDeviceSize offset;
6667 VkDeviceSize sumFreeSize;
6668 VkDeviceSize sumItemSize;
6669 VmaSuballocationList::iterator item;
6670 size_t itemsToMakeLostCount;
6672 VmaAllocationRequestType type;
6674 VkDeviceSize CalcCost()
const
6676 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
6684 class VmaBlockMetadata
6688 virtual ~VmaBlockMetadata() { }
6689 virtual void Init(VkDeviceSize size) { m_Size = size; }
6692 virtual bool Validate()
const = 0;
6693 VkDeviceSize GetSize()
const {
return m_Size; }
6694 virtual size_t GetAllocationCount()
const = 0;
6695 virtual VkDeviceSize GetSumFreeSize()
const = 0;
6696 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
6698 virtual bool IsEmpty()
const = 0;
6700 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
6702 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
6704 #if VMA_STATS_STRING_ENABLED
6705 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
6711 virtual bool CreateAllocationRequest(
6712 uint32_t currentFrameIndex,
6713 uint32_t frameInUseCount,
6714 VkDeviceSize bufferImageGranularity,
6715 VkDeviceSize allocSize,
6716 VkDeviceSize allocAlignment,
6718 VmaSuballocationType allocType,
6719 bool canMakeOtherLost,
6722 VmaAllocationRequest* pAllocationRequest) = 0;
6724 virtual bool MakeRequestedAllocationsLost(
6725 uint32_t currentFrameIndex,
6726 uint32_t frameInUseCount,
6727 VmaAllocationRequest* pAllocationRequest) = 0;
6729 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
6731 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
6735 const VmaAllocationRequest& request,
6736 VmaSuballocationType type,
6737 VkDeviceSize allocSize,
6742 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
6745 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
6747 #if VMA_STATS_STRING_ENABLED
6748 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6749 VkDeviceSize unusedBytes,
6750 size_t allocationCount,
6751 size_t unusedRangeCount)
const;
6752 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6753 VkDeviceSize offset,
6755 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6756 VkDeviceSize offset,
6757 VkDeviceSize size)
const;
6758 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
6762 VkDeviceSize m_Size;
6763 const VkAllocationCallbacks* m_pAllocationCallbacks;
6766 #define VMA_VALIDATE(cond) do { if(!(cond)) { \
6767 VMA_ASSERT(0 && "Validation failed: " #cond); \
6771 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
6773 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
6776 virtual ~VmaBlockMetadata_Generic();
6777 virtual void Init(VkDeviceSize size);
6779 virtual bool Validate()
const;
6780 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
6781 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
6782 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6783 virtual bool IsEmpty()
const;
6785 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6786 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6788 #if VMA_STATS_STRING_ENABLED
6789 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
6792 virtual bool CreateAllocationRequest(
6793 uint32_t currentFrameIndex,
6794 uint32_t frameInUseCount,
6795 VkDeviceSize bufferImageGranularity,
6796 VkDeviceSize allocSize,
6797 VkDeviceSize allocAlignment,
6799 VmaSuballocationType allocType,
6800 bool canMakeOtherLost,
6802 VmaAllocationRequest* pAllocationRequest);
6804 virtual bool MakeRequestedAllocationsLost(
6805 uint32_t currentFrameIndex,
6806 uint32_t frameInUseCount,
6807 VmaAllocationRequest* pAllocationRequest);
6809 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6811 virtual VkResult CheckCorruption(
const void* pBlockData);
6814 const VmaAllocationRequest& request,
6815 VmaSuballocationType type,
6816 VkDeviceSize allocSize,
6820 virtual void FreeAtOffset(VkDeviceSize offset);
6825 bool IsBufferImageGranularityConflictPossible(
6826 VkDeviceSize bufferImageGranularity,
6827 VmaSuballocationType& inOutPrevSuballocType)
const;
6830 friend class VmaDefragmentationAlgorithm_Generic;
6831 friend class VmaDefragmentationAlgorithm_Fast;
6833 uint32_t m_FreeCount;
6834 VkDeviceSize m_SumFreeSize;
6835 VmaSuballocationList m_Suballocations;
6838 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
6840 bool ValidateFreeSuballocationList()
const;
6844 bool CheckAllocation(
6845 uint32_t currentFrameIndex,
6846 uint32_t frameInUseCount,
6847 VkDeviceSize bufferImageGranularity,
6848 VkDeviceSize allocSize,
6849 VkDeviceSize allocAlignment,
6850 VmaSuballocationType allocType,
6851 VmaSuballocationList::const_iterator suballocItem,
6852 bool canMakeOtherLost,
6853 VkDeviceSize* pOffset,
6854 size_t* itemsToMakeLostCount,
6855 VkDeviceSize* pSumFreeSize,
6856 VkDeviceSize* pSumItemSize)
const;
6858 void MergeFreeWithNext(VmaSuballocationList::iterator item);
6862 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
6865 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
6868 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
6949 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
6951 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
6954 virtual ~VmaBlockMetadata_Linear();
6955 virtual void Init(VkDeviceSize size);
6957 virtual bool Validate()
const;
6958 virtual size_t GetAllocationCount()
const;
6959 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
6960 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6961 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
6963 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6964 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6966 #if VMA_STATS_STRING_ENABLED
6967 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
6970 virtual bool CreateAllocationRequest(
6971 uint32_t currentFrameIndex,
6972 uint32_t frameInUseCount,
6973 VkDeviceSize bufferImageGranularity,
6974 VkDeviceSize allocSize,
6975 VkDeviceSize allocAlignment,
6977 VmaSuballocationType allocType,
6978 bool canMakeOtherLost,
6980 VmaAllocationRequest* pAllocationRequest);
6982 virtual bool MakeRequestedAllocationsLost(
6983 uint32_t currentFrameIndex,
6984 uint32_t frameInUseCount,
6985 VmaAllocationRequest* pAllocationRequest);
6987 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6989 virtual VkResult CheckCorruption(
const void* pBlockData);
6992 const VmaAllocationRequest& request,
6993 VmaSuballocationType type,
6994 VkDeviceSize allocSize,
6998 virtual void FreeAtOffset(VkDeviceSize offset);
7008 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
7010 enum SECOND_VECTOR_MODE
7012 SECOND_VECTOR_EMPTY,
7017 SECOND_VECTOR_RING_BUFFER,
7023 SECOND_VECTOR_DOUBLE_STACK,
7026 VkDeviceSize m_SumFreeSize;
7027 SuballocationVectorType m_Suballocations0, m_Suballocations1;
7028 uint32_t m_1stVectorIndex;
7029 SECOND_VECTOR_MODE m_2ndVectorMode;
7031 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7032 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
7033 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7034 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
7037 size_t m_1stNullItemsBeginCount;
7039 size_t m_1stNullItemsMiddleCount;
7041 size_t m_2ndNullItemsCount;
7043 bool ShouldCompact1st()
const;
7044 void CleanupAfterFree();
7046 bool CreateAllocationRequest_LowerAddress(
7047 uint32_t currentFrameIndex,
7048 uint32_t frameInUseCount,
7049 VkDeviceSize bufferImageGranularity,
7050 VkDeviceSize allocSize,
7051 VkDeviceSize allocAlignment,
7052 VmaSuballocationType allocType,
7053 bool canMakeOtherLost,
7055 VmaAllocationRequest* pAllocationRequest);
7056 bool CreateAllocationRequest_UpperAddress(
7057 uint32_t currentFrameIndex,
7058 uint32_t frameInUseCount,
7059 VkDeviceSize bufferImageGranularity,
7060 VkDeviceSize allocSize,
7061 VkDeviceSize allocAlignment,
7062 VmaSuballocationType allocType,
7063 bool canMakeOtherLost,
7065 VmaAllocationRequest* pAllocationRequest);
7079 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
7081 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
7084 virtual ~VmaBlockMetadata_Buddy();
7085 virtual void Init(VkDeviceSize size);
7087 virtual bool Validate()
const;
7088 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
7089 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
7090 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
7091 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
7093 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
7094 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
7096 #if VMA_STATS_STRING_ENABLED
7097 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
7100 virtual bool CreateAllocationRequest(
7101 uint32_t currentFrameIndex,
7102 uint32_t frameInUseCount,
7103 VkDeviceSize bufferImageGranularity,
7104 VkDeviceSize allocSize,
7105 VkDeviceSize allocAlignment,
7107 VmaSuballocationType allocType,
7108 bool canMakeOtherLost,
7110 VmaAllocationRequest* pAllocationRequest);
7112 virtual bool MakeRequestedAllocationsLost(
7113 uint32_t currentFrameIndex,
7114 uint32_t frameInUseCount,
7115 VmaAllocationRequest* pAllocationRequest);
7117 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
7119 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
7122 const VmaAllocationRequest& request,
7123 VmaSuballocationType type,
7124 VkDeviceSize allocSize,
7127 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
7128 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
7131 static const VkDeviceSize MIN_NODE_SIZE = 32;
7132 static const size_t MAX_LEVELS = 30;
// Scratch accumulator for a metadata-validation pass over the buddy node
// tree (used by ValidateNode below). Presumably the recomputed totals are
// compared against the cached counters (m_AllocationCount, m_SumFreeSize)
// afterwards — TODO confirm in Validate(), whose body is outside this extract.
7134 struct ValidationContext
// Allocated (non-free) nodes encountered during traversal.
7136 size_t calculatedAllocationCount;
// Free nodes encountered during traversal.
7137 size_t calculatedFreeCount;
// Total size of the free nodes encountered.
7138 VkDeviceSize calculatedSumFreeSize;
// All counters start at zero before traversal begins.
7140 ValidationContext() :
7141 calculatedAllocationCount(0),
7142 calculatedFreeCount(0),
7143 calculatedSumFreeSize(0) { }
7148 VkDeviceSize offset;
7178 VkDeviceSize m_UsableSize;
7179 uint32_t m_LevelCount;
7185 } m_FreeList[MAX_LEVELS];
7187 size_t m_AllocationCount;
7191 VkDeviceSize m_SumFreeSize;
7193 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
7194 void DeleteNode(Node* node);
7195 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
7196 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
7197 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
7199 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
7200 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
7204 void AddToFreeListFront(uint32_t level, Node* node);
7208 void RemoveFromFreeList(uint32_t level, Node* node);
7210 #if VMA_STATS_STRING_ENABLED
7211 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// Wraps one VkDeviceMemory object plus the VmaBlockMetadata that tracks how
// its range is divided into suballocations. NOTE(review): this extract elides
// structural lines; the declaration is incomplete as shown.
7221 class VmaDeviceMemoryBlock
7223 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
// Owned metadata object describing suballocations within this block.
7225 VmaBlockMetadata* m_pMetadata;
// Destructor only asserts invariants: the block must be fully unmapped and
// its VkDeviceMemory already released (via a Destroy-style call elsewhere)
// before destruction.
7229 ~VmaDeviceMemoryBlock()
7231 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
7232 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// (Parameters of an Init-style function; its name line is elided here.)
7239 uint32_t newMemoryTypeIndex,
7240 VkDeviceMemory newMemory,
7241 VkDeviceSize newSize,
7243 uint32_t algorithm);
// Trivial accessors for the block's identity and mapping state.
7247 VmaPool GetParentPool()
const {
return m_hParentPool; }
7248 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
7249 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7250 uint32_t GetId()
const {
return m_Id; }
7251 void* GetMappedData()
const {
return m_pMappedData; }
// Consistency check of the block and its metadata.
7254 bool Validate()
const;
// Reference-counted mapping: `count` map requests are added at once and the
// mapped pointer is returned through ppData.
7259 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Corruption-detection helpers: write / verify magic values around an
// allocation's byte range inside this block.
7262 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
7263 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
// Bind a buffer/image to this block's memory at a local offset (remaining
// parameter lines are elided in this extract).
7265 VkResult BindBufferMemory(
7268 VkDeviceSize allocationLocalOffset,
7271 VkResult BindImageMemory(
7274 VkDeviceSize allocationLocalOffset,
// Private state: memory type, the raw Vulkan handle, and mapping bookkeeping.
7280 uint32_t m_MemoryTypeIndex;
7282 VkDeviceMemory m_hMemory;
// Number of outstanding Map() requests; checked in the destructor above.
7290 uint32_t m_MapCount;
7291 void* m_pMappedData;
// One planned move of an allocation during defragmentation: from
// (srcBlockIndex, srcOffset) to (dstBlockIndex, dstOffset), with direct
// pointers to the source and destination blocks.
7294 struct VmaDefragmentationMove
7296 size_t srcBlockIndex;
7297 size_t dstBlockIndex;
7298 VkDeviceSize srcOffset;
7299 VkDeviceSize dstOffset;
7302 VmaDeviceMemoryBlock* pSrcBlock;
7303 VmaDeviceMemoryBlock* pDstBlock;
7306 class VmaDefragmentationAlgorithm;
7314 struct VmaBlockVector
7316 VMA_CLASS_NO_COPY(VmaBlockVector)
7321 uint32_t memoryTypeIndex,
7322 VkDeviceSize preferredBlockSize,
7323 size_t minBlockCount,
7324 size_t maxBlockCount,
7325 VkDeviceSize bufferImageGranularity,
7326 uint32_t frameInUseCount,
7327 bool explicitBlockSize,
7332 VkResult CreateMinBlocks();
7334 VmaAllocator GetAllocator()
const {
return m_hAllocator; }
7335 VmaPool GetParentPool()
const {
return m_hParentPool; }
7336 bool IsCustomPool()
const {
return m_hParentPool != VMA_NULL; }
7337 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7338 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
7339 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
7340 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
7341 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
7346 bool IsCorruptionDetectionEnabled()
const;
7349 uint32_t currentFrameIndex,
7351 VkDeviceSize alignment,
7353 VmaSuballocationType suballocType,
7354 size_t allocationCount,
7362 #if VMA_STATS_STRING_ENABLED
7363 void PrintDetailedMap(
class VmaJsonWriter& json);
7366 void MakePoolAllocationsLost(
7367 uint32_t currentFrameIndex,
7368 size_t* pLostAllocationCount);
7369 VkResult CheckCorruption();
7373 class VmaBlockVectorDefragmentationContext* pCtx,
7375 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
7376 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
7377 VkCommandBuffer commandBuffer);
7378 void DefragmentationEnd(
7379 class VmaBlockVectorDefragmentationContext* pCtx,
7383 uint32_t ProcessDefragmentations(
7384 class VmaBlockVectorDefragmentationContext *pCtx,
7387 void CommitDefragmentations(
7388 class VmaBlockVectorDefragmentationContext *pCtx,
7394 size_t GetBlockCount()
const {
return m_Blocks.size(); }
7395 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
7396 size_t CalcAllocationCount()
const;
7397 bool IsBufferImageGranularityConflictPossible()
const;
7400 friend class VmaDefragmentationAlgorithm_Generic;
7404 const uint32_t m_MemoryTypeIndex;
7405 const VkDeviceSize m_PreferredBlockSize;
7406 const size_t m_MinBlockCount;
7407 const size_t m_MaxBlockCount;
7408 const VkDeviceSize m_BufferImageGranularity;
7409 const uint32_t m_FrameInUseCount;
7410 const bool m_ExplicitBlockSize;
7411 const uint32_t m_Algorithm;
7412 const float m_Priority;
7413 VMA_RW_MUTEX m_Mutex;
7417 bool m_HasEmptyBlock;
7419 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
7420 uint32_t m_NextBlockId;
7422 VkDeviceSize CalcMaxBlockSize()
const;
7425 void Remove(VmaDeviceMemoryBlock* pBlock);
7429 void IncrementallySortBlocks();
7431 VkResult AllocatePage(
7432 uint32_t currentFrameIndex,
7434 VkDeviceSize alignment,
7436 VmaSuballocationType suballocType,
7440 VkResult AllocateFromBlock(
7441 VmaDeviceMemoryBlock* pBlock,
7442 uint32_t currentFrameIndex,
7444 VkDeviceSize alignment,
7447 VmaSuballocationType suballocType,
7451 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
7454 void ApplyDefragmentationMovesCpu(
7455 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7456 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
7458 void ApplyDefragmentationMovesGpu(
7459 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7460 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7461 VkCommandBuffer commandBuffer);
7469 void UpdateHasEmptyBlock();
7474 VMA_CLASS_NO_COPY(VmaPool_T)
7476 VmaBlockVector m_BlockVector;
7481 VkDeviceSize preferredBlockSize);
7484 uint32_t GetId()
const {
return m_Id; }
7485 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
7487 const char* GetName()
const {
return m_Name; }
7488 void SetName(
const char* pName);
7490 #if VMA_STATS_STRING_ENABLED
7497 VmaPool_T* m_PrevPool = VMA_NULL;
7498 VmaPool_T* m_NextPool = VMA_NULL;
7499 friend struct VmaPoolListItemTraits;
// Traits adapter that lets VmaIntrusiveLinkedList thread VmaPool_T objects
// together through their m_PrevPool / m_NextPool members (declared as
// friends of VmaPool_T above).
7502 struct VmaPoolListItemTraits
7504 typedef VmaPool_T ItemType;
7505 static ItemType* GetPrev(
const ItemType* item) {
return item->m_PrevPool; }
7506 static ItemType* GetNext(
const ItemType* item) {
return item->m_NextPool; }
// Mutable accessors used by the list when relinking nodes.
7507 static ItemType*& AccessPrev(ItemType* item) {
return item->m_PrevPool; }
7508 static ItemType*& AccessNext(ItemType* item) {
return item->m_NextPool; }
// Abstract interface for a defragmentation strategy operating on one
// VmaBlockVector. Concrete implementations below: _Generic and _Fast.
// NOTE(review): structural lines are elided in this extract.
7518 class VmaDefragmentationAlgorithm
7520 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
// Constructor stores the allocator, target block vector and frame index;
// it performs no work itself.
7522 VmaDefragmentationAlgorithm(
7524 VmaBlockVector* pBlockVector,
7525 uint32_t currentFrameIndex) :
7526 m_hAllocator(hAllocator),
7527 m_pBlockVector(pBlockVector),
7528 m_CurrentFrameIndex(currentFrameIndex)
7531 virtual ~VmaDefragmentationAlgorithm()
// Register a single allocation as a defragmentation candidate; pChanged is
// an optional out-flag reported back to the caller.
7535 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
// Consider every allocation in the block vector.
7536 virtual void AddAll() = 0;
// Compute moves, bounded by the byte / allocation-count budgets.
7538 virtual VkResult Defragment(
7539 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7540 VkDeviceSize maxBytesToMove,
7541 uint32_t maxAllocationsToMove,
// Statistics reported after Defragment().
7544 virtual VkDeviceSize GetBytesMoved()
const = 0;
7545 virtual uint32_t GetAllocationsMoved()
const = 0;
// Immutable context shared by all implementations.
7549 VmaBlockVector*
const m_pBlockVector;
7550 const uint32_t m_CurrentFrameIndex;
// Candidate record: the allocation handle plus the caller's changed-flag.
7552 struct AllocationInfo
7555 VkBool32* m_pChanged;
// Default-constructed entry is empty (null handle, no flag).
7558 m_hAllocation(VK_NULL_HANDLE),
7559 m_pChanged(VMA_NULL)
7563 m_hAllocation(hAlloc),
7564 m_pChanged(pChanged)
7570 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
7572 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
7574 VmaDefragmentationAlgorithm_Generic(
7576 VmaBlockVector* pBlockVector,
7577 uint32_t currentFrameIndex,
7578 bool overlappingMoveSupported);
7579 virtual ~VmaDefragmentationAlgorithm_Generic();
7581 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7582 virtual void AddAll() { m_AllAllocations =
true; }
7584 virtual VkResult Defragment(
7585 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7586 VkDeviceSize maxBytesToMove,
7587 uint32_t maxAllocationsToMove,
7590 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7591 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7594 uint32_t m_AllocationCount;
7595 bool m_AllAllocations;
7597 VkDeviceSize m_BytesMoved;
7598 uint32_t m_AllocationsMoved;
7600 struct AllocationInfoSizeGreater
7602 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7604 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
7608 struct AllocationInfoOffsetGreater
7610 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7612 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
7618 size_t m_OriginalBlockIndex;
7619 VmaDeviceMemoryBlock* m_pBlock;
7620 bool m_HasNonMovableAllocations;
7621 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
7623 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
7624 m_OriginalBlockIndex(SIZE_MAX),
7626 m_HasNonMovableAllocations(true),
7627 m_Allocations(pAllocationCallbacks)
7631 void CalcHasNonMovableAllocations()
7633 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
7634 const size_t defragmentAllocCount = m_Allocations.size();
7635 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
7638 void SortAllocationsBySizeDescending()
7640 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
7643 void SortAllocationsByOffsetDescending()
7645 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
7649 struct BlockPointerLess
7651 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const
7653 return pLhsBlockInfo->m_pBlock < pRhsBlock;
7655 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7657 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
7663 struct BlockInfoCompareMoveDestination
7665 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7667 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
7671 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
7675 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
7683 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
7684 BlockInfoVector m_Blocks;
7686 VkResult DefragmentRound(
7687 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7688 VkDeviceSize maxBytesToMove,
7689 uint32_t maxAllocationsToMove,
7690 bool freeOldAllocations);
7692 size_t CalcBlocksWithNonMovableCount()
const;
7694 static bool MoveMakesSense(
7695 size_t dstBlockIndex, VkDeviceSize dstOffset,
7696 size_t srcBlockIndex, VkDeviceSize srcOffset);
7699 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
7701 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
7703 VmaDefragmentationAlgorithm_Fast(
7705 VmaBlockVector* pBlockVector,
7706 uint32_t currentFrameIndex,
7707 bool overlappingMoveSupported);
7708 virtual ~VmaDefragmentationAlgorithm_Fast();
7710 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
7711 virtual void AddAll() { m_AllAllocations =
true; }
7713 virtual VkResult Defragment(
7714 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7715 VkDeviceSize maxBytesToMove,
7716 uint32_t maxAllocationsToMove,
7719 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7720 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7725 size_t origBlockIndex;
// Small fixed-capacity (MAX_COUNT = 4) cache of known free ranges across
// blocks, used by the fast defragmentation algorithm to place allocations
// into gaps without rescanning metadata. An entry with
// blockInfoIndex == SIZE_MAX is an empty slot.
7728 class FreeSpaceDatabase
// (Constructor fragment) initialize every slot to the empty sentinel.
7734 s.blockInfoIndex = SIZE_MAX;
7735 for(
size_t i = 0; i < MAX_COUNT; ++i)
7737 m_FreeSpaces[i] = s;
// Offer a free range for caching. Ranges below the registration threshold
// are ignored; otherwise the range replaces an empty slot or, failing that,
// the smallest cached range that is still smaller than the new one.
7741 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
7743 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7749 size_t bestIndex = SIZE_MAX;
7750 for(
size_t i = 0; i < MAX_COUNT; ++i)
// Empty slot: take it (selection logic partially elided in this extract).
7753 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
// Prefer evicting the smallest strictly-smaller cached range.
7758 if(m_FreeSpaces[i].size < size &&
7759 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
7765 if(bestIndex != SIZE_MAX)
7767 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
7768 m_FreeSpaces[bestIndex].offset = offset;
7769 m_FreeSpaces[bestIndex].size = size;
// Find a cached range that can hold `size` bytes at `alignment`. On success
// returns the block index and aligned destination offset, and either shrinks
// the chosen entry (leftover still worth caching) or empties it.
7773 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
7774 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
7776 size_t bestIndex = SIZE_MAX;
7777 VkDeviceSize bestFreeSpaceAfter = 0;
7778 for(
size_t i = 0; i < MAX_COUNT; ++i)
// Consider only occupied slots.
7781 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
7783 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
// Allocation must fit between the aligned offset and the range's end.
7785 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
7787 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
// Prefer the candidate leaving the most space after the allocation.
7789 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
7792 bestFreeSpaceAfter = freeSpaceAfter;
7798 if(bestIndex != SIZE_MAX)
7800 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
7801 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
// Leftover large enough to keep: shrink the entry in place by the
// alignment padding plus the allocation size.
7803 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7806 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
7807 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
7808 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
// Otherwise the range is consumed: mark the slot empty.
7813 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
// Storage: fixed array of (block index, offset, size) entries.
7823 static const size_t MAX_COUNT = 4;
7827 size_t blockInfoIndex;
7828 VkDeviceSize offset;
7830 } m_FreeSpaces[MAX_COUNT];
7833 const bool m_OverlappingMoveSupported;
7835 uint32_t m_AllocationCount;
7836 bool m_AllAllocations;
7838 VkDeviceSize m_BytesMoved;
7839 uint32_t m_AllocationsMoved;
7841 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
7843 void PreprocessMetadata();
7844 void PostprocessMetadata();
7845 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
7848 struct VmaBlockDefragmentationContext
7852 BLOCK_FLAG_USED = 0x00000001,
// Per-block-vector state of an ongoing defragmentation: owns the chosen
// algorithm, the per-block contexts, and the queue of planned moves with
// processed/committed progress counters. NOTE(review): structural lines are
// elided in this extract.
7858 class VmaBlockVectorDefragmentationContext
7860 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
// Public working data filled in while defragmentation runs.
7864 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
7865 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
// Progress through defragmentationMoves: handed out vs. finalized.
7866 uint32_t defragmentationMovesProcessed;
7867 uint32_t defragmentationMovesCommitted;
7868 bool hasDefragmentationPlan;
7870 VmaBlockVectorDefragmentationContext(
7873 VmaBlockVector* pBlockVector,
7874 uint32_t currFrameIndex);
7875 ~VmaBlockVectorDefragmentationContext();
// Accessors: owning custom pool (if any), target vector, and algorithm.
7877 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
7878 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
7879 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
// Register a single allocation, or flag that all allocations participate.
7881 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7882 void AddAll() { m_AllAllocations =
true; }
// Private state: immutable target/frame, lazily created algorithm, and the
// explicit allocation list used when m_AllAllocations is false.
7891 VmaBlockVector*
const m_pBlockVector;
7892 const uint32_t m_CurrFrameIndex;
7894 VmaDefragmentationAlgorithm* m_pAlgorithm;
7902 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
7903 bool m_AllAllocations;
// Top-level defragmentation context (the object behind the public
// VmaDefragmentationContext handle): aggregates one
// VmaBlockVectorDefragmentationContext per default memory-type pool plus a
// list for custom pools. NOTE(review): structural lines are elided here.
7906 struct VmaDefragmentationContext_T
7909 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
7911 VmaDefragmentationContext_T(
7913 uint32_t currFrameIndex,
7916 ~VmaDefragmentationContext_T();
// Register custom pools / individual allocations to defragment;
// pAllocationsChanged optionally receives per-allocation changed flags.
7918 void AddPools(uint32_t poolCount,
const VmaPool* pPools);
7919 void AddAllocations(
7920 uint32_t allocationCount,
7922 VkBool32* pAllocationsChanged);
// Run defragmentation within separate CPU and GPU budgets
// (bytes and allocation counts for each).
7930 VkResult Defragment(
7931 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
7932 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
7936 VkResult DefragmentPassEnd();
// Immutable parameters captured at creation.
7940 const uint32_t m_CurrFrameIndex;
7941 const uint32_t m_Flags;
// Remaining budgets, consumed as moves are executed.
7944 VkDeviceSize m_MaxCpuBytesToMove;
7945 uint32_t m_MaxCpuAllocationsToMove;
7946 VkDeviceSize m_MaxGpuBytesToMove;
7947 uint32_t m_MaxGpuAllocationsToMove;
// One slot per memory type for default pools; custom pools go in the vector.
7950 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
7952 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
7955 #if VMA_RECORDING_ENABLED
7962 void WriteConfiguration(
7963 const VkPhysicalDeviceProperties& devProps,
7964 const VkPhysicalDeviceMemoryProperties& memProps,
7965 uint32_t vulkanApiVersion,
7966 bool dedicatedAllocationExtensionEnabled,
7967 bool bindMemory2ExtensionEnabled,
7968 bool memoryBudgetExtensionEnabled,
7969 bool deviceCoherentMemoryExtensionEnabled);
7972 void RecordCreateAllocator(uint32_t frameIndex);
7973 void RecordDestroyAllocator(uint32_t frameIndex);
7974 void RecordCreatePool(uint32_t frameIndex,
7977 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
7978 void RecordAllocateMemory(uint32_t frameIndex,
7979 const VkMemoryRequirements& vkMemReq,
7982 void RecordAllocateMemoryPages(uint32_t frameIndex,
7983 const VkMemoryRequirements& vkMemReq,
7985 uint64_t allocationCount,
7987 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
7988 const VkMemoryRequirements& vkMemReq,
7989 bool requiresDedicatedAllocation,
7990 bool prefersDedicatedAllocation,
7993 void RecordAllocateMemoryForImage(uint32_t frameIndex,
7994 const VkMemoryRequirements& vkMemReq,
7995 bool requiresDedicatedAllocation,
7996 bool prefersDedicatedAllocation,
7999 void RecordFreeMemory(uint32_t frameIndex,
8001 void RecordFreeMemoryPages(uint32_t frameIndex,
8002 uint64_t allocationCount,
8004 void RecordSetAllocationUserData(uint32_t frameIndex,
8006 const void* pUserData);
8007 void RecordCreateLostAllocation(uint32_t frameIndex,
8009 void RecordMapMemory(uint32_t frameIndex,
8011 void RecordUnmapMemory(uint32_t frameIndex,
8013 void RecordFlushAllocation(uint32_t frameIndex,
8014 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
8015 void RecordInvalidateAllocation(uint32_t frameIndex,
8016 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
8017 void RecordCreateBuffer(uint32_t frameIndex,
8018 const VkBufferCreateInfo& bufCreateInfo,
8021 void RecordCreateImage(uint32_t frameIndex,
8022 const VkImageCreateInfo& imageCreateInfo,
8025 void RecordDestroyBuffer(uint32_t frameIndex,
8027 void RecordDestroyImage(uint32_t frameIndex,
8029 void RecordTouchAllocation(uint32_t frameIndex,
8031 void RecordGetAllocationInfo(uint32_t frameIndex,
8033 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
8035 void RecordDefragmentationBegin(uint32_t frameIndex,
8038 void RecordDefragmentationEnd(uint32_t frameIndex,
8040 void RecordSetPoolName(uint32_t frameIndex,
8051 class UserDataString
8055 const char* GetString()
const {
return m_Str; }
8065 VMA_MUTEX m_FileMutex;
8066 std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;
8068 void GetBasicParams(CallParams& outParams);
8071 template<
typename T>
8072 void PrintPointerList(uint64_t count,
const T* pItems)
8076 fprintf(m_File,
"%p", pItems[0]);
8077 for(uint64_t i = 1; i < count; ++i)
8079 fprintf(m_File,
" %p", pItems[i]);
8084 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
// Factory for VmaAllocation_T objects backed by a VmaPoolAllocator, so that
// allocation handles are pool-allocated rather than individually heap-
// allocated. NOTE(review): structural lines are elided in this extract.
8093 class VmaAllocationObjectAllocator
8095 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
8097 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
// Perfect-forward constructor arguments to VmaAllocation_T.
8099 template<
typename... Types>
VmaAllocation Allocate(Types... args);
// Underlying object pool.
8104 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
// Per-heap memory-usage bookkeeping. m_BlockBytes / m_AllocationBytes are
// atomics updated on every (de)allocation; the VMA_MEMORY_BUDGET section
// additionally caches values fetched from VK_EXT_memory_budget, guarded by
// m_BudgetMutex. NOTE(review): braces and #endif lines are elided here.
8107 struct VmaCurrentBudgetData
8109 VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
8110 VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
8112 #if VMA_MEMORY_BUDGET
// Counts operations since the last budget fetch (used to decide when to
// refresh — the refresh logic lives outside this extract).
8113 VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
8114 VMA_RW_MUTEX m_BudgetMutex;
// Snapshot of driver-reported usage/budget per heap, plus our own block
// bytes at the moment of that snapshot.
8115 uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
8116 uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
8117 uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
// Zero-initialize all per-heap counters.
8120 VmaCurrentBudgetData()
8122 for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
8124 m_BlockBytes[heapIndex] = 0;
8125 m_AllocationBytes[heapIndex] = 0;
8126 #if VMA_MEMORY_BUDGET
8127 m_VulkanUsage[heapIndex] = 0;
8128 m_VulkanBudget[heapIndex] = 0;
8129 m_BlockBytesAtBudgetFetch[heapIndex] = 0;
8133 #if VMA_MEMORY_BUDGET
8134 m_OperationsSinceBudgetFetch = 0;
// Record an allocation of allocationSize bytes on the given heap.
8138 void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
8140 m_AllocationBytes[heapIndex] += allocationSize;
8141 #if VMA_MEMORY_BUDGET
8142 ++m_OperationsSinceBudgetFetch;
// Record a deallocation; asserts the heap's counter never underflows.
8146 void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
8148 VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
8149 m_AllocationBytes[heapIndex] -= allocationSize;
8150 #if VMA_MEMORY_BUDGET
8151 ++m_OperationsSinceBudgetFetch;
8157 struct VmaAllocator_T
8159 VMA_CLASS_NO_COPY(VmaAllocator_T)
8162 uint32_t m_VulkanApiVersion;
8163 bool m_UseKhrDedicatedAllocation;
8164 bool m_UseKhrBindMemory2;
8165 bool m_UseExtMemoryBudget;
8166 bool m_UseAmdDeviceCoherentMemory;
8167 bool m_UseKhrBufferDeviceAddress;
8168 bool m_UseExtMemoryPriority;
8170 VkInstance m_hInstance;
8171 bool m_AllocationCallbacksSpecified;
8172 VkAllocationCallbacks m_AllocationCallbacks;
8174 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
8177 uint32_t m_HeapSizeLimitMask;
8179 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
8180 VkPhysicalDeviceMemoryProperties m_MemProps;
8183 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
8185 typedef VmaIntrusiveLinkedList<VmaDedicatedAllocationListItemTraits> DedicatedAllocationLinkedList;
8186 DedicatedAllocationLinkedList m_DedicatedAllocations[VK_MAX_MEMORY_TYPES];
8187 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
8189 VmaCurrentBudgetData m_Budget;
8190 VMA_ATOMIC_UINT32 m_DeviceMemoryCount;
8196 const VkAllocationCallbacks* GetAllocationCallbacks()
const
8198 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
8202 return m_VulkanFunctions;
8205 VkPhysicalDevice GetPhysicalDevice()
const {
return m_PhysicalDevice; }
8207 VkDeviceSize GetBufferImageGranularity()
const
8210 static_cast<VkDeviceSize
>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
8211 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
8214 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
8215 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
8217 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const
8219 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
8220 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
8223 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const
8225 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
8226 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8229 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const
8231 return IsMemoryTypeNonCoherent(memTypeIndex) ?
8232 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
8233 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
8236 bool IsIntegratedGpu()
const
8238 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
8241 uint32_t GetGlobalMemoryTypeBits()
const {
return m_GlobalMemoryTypeBits; }
8243 #if VMA_RECORDING_ENABLED
8244 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
8247 void GetBufferMemoryRequirements(
8249 VkMemoryRequirements& memReq,
8250 bool& requiresDedicatedAllocation,
8251 bool& prefersDedicatedAllocation)
const;
8252 void GetImageMemoryRequirements(
8254 VkMemoryRequirements& memReq,
8255 bool& requiresDedicatedAllocation,
8256 bool& prefersDedicatedAllocation)
const;
8259 VkResult AllocateMemory(
8260 const VkMemoryRequirements& vkMemReq,
8261 bool requiresDedicatedAllocation,
8262 bool prefersDedicatedAllocation,
8263 VkBuffer dedicatedBuffer,
8264 VkBufferUsageFlags dedicatedBufferUsage,
8265 VkImage dedicatedImage,
8267 VmaSuballocationType suballocType,
8268 size_t allocationCount,
8273 size_t allocationCount,
8276 void CalculateStats(
VmaStats* pStats);
8279 VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);
8281 #if VMA_STATS_STRING_ENABLED
8282 void PrintDetailedMap(
class VmaJsonWriter& json);
8285 VkResult DefragmentationBegin(
8289 VkResult DefragmentationEnd(
8292 VkResult DefragmentationPassBegin(
8295 VkResult DefragmentationPassEnd(
8302 void DestroyPool(
VmaPool pool);
8305 void SetCurrentFrameIndex(uint32_t frameIndex);
8306 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
8308 void MakePoolAllocationsLost(
8310 size_t* pLostAllocationCount);
8311 VkResult CheckPoolCorruption(
VmaPool hPool);
8312 VkResult CheckCorruption(uint32_t memoryTypeBits);
8317 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
8319 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
8321 VkResult BindVulkanBuffer(
8322 VkDeviceMemory memory,
8323 VkDeviceSize memoryOffset,
8327 VkResult BindVulkanImage(
8328 VkDeviceMemory memory,
8329 VkDeviceSize memoryOffset,
8336 VkResult BindBufferMemory(
8338 VkDeviceSize allocationLocalOffset,
8341 VkResult BindImageMemory(
8343 VkDeviceSize allocationLocalOffset,
8347 VkResult FlushOrInvalidateAllocation(
8349 VkDeviceSize offset, VkDeviceSize size,
8350 VMA_CACHE_OPERATION op);
8351 VkResult FlushOrInvalidateAllocations(
8352 uint32_t allocationCount,
8354 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
8355 VMA_CACHE_OPERATION op);
8357 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
8363 uint32_t GetGpuDefragmentationMemoryTypeBits();
8366 VkDeviceSize m_PreferredLargeHeapBlockSize;
8368 VkPhysicalDevice m_PhysicalDevice;
8369 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
8370 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
8372 VMA_RW_MUTEX m_PoolsMutex;
8373 typedef VmaIntrusiveLinkedList<VmaPoolListItemTraits> PoolList;
8376 uint32_t m_NextPoolId;
8381 uint32_t m_GlobalMemoryTypeBits;
8383 #if VMA_RECORDING_ENABLED
8384 VmaRecorder* m_pRecorder;
8389 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
8390 void ImportVulkanFunctions_Static();
8395 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
8396 void ImportVulkanFunctions_Dynamic();
8399 void ValidateVulkanFunctions();
8401 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
8403 VkResult AllocateMemoryOfType(
8405 VkDeviceSize alignment,
8406 bool dedicatedAllocation,
8407 VkBuffer dedicatedBuffer,
8408 VkBufferUsageFlags dedicatedBufferUsage,
8409 VkImage dedicatedImage,
8411 uint32_t memTypeIndex,
8412 VmaSuballocationType suballocType,
8413 size_t allocationCount,
8417 VkResult AllocateDedicatedMemoryPage(
8419 VmaSuballocationType suballocType,
8420 uint32_t memTypeIndex,
8421 const VkMemoryAllocateInfo& allocInfo,
8423 bool isUserDataString,
8428 VkResult AllocateDedicatedMemory(
8430 VmaSuballocationType suballocType,
8431 uint32_t memTypeIndex,
8434 bool isUserDataString,
8437 VkBuffer dedicatedBuffer,
8438 VkBufferUsageFlags dedicatedBufferUsage,
8439 VkImage dedicatedImage,
8440 size_t allocationCount,
8449 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
8451 uint32_t CalculateGlobalMemoryTypeBits()
const;
8453 bool GetFlushOrInvalidateRange(
8455 VkDeviceSize offset, VkDeviceSize size,
8456 VkMappedMemoryRange& outRange)
const;
8458 #if VMA_MEMORY_BUDGET
8459 void UpdateVulkanBudget();
8466 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
8468 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
8471 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
8473 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
8476 template<
typename T>
8479 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
8482 template<
typename T>
8483 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
8485 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
8488 template<
typename T>
8489 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
8494 VmaFree(hAllocator, ptr);
8498 template<
typename T>
8499 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
8503 for(
size_t i = count; i--; )
8505 VmaFree(hAllocator, ptr);
8512 #if VMA_STATS_STRING_ENABLED
8514 class VmaStringBuilder
8517 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
8518 size_t GetLength()
const {
return m_Data.size(); }
8519 const char* GetData()
const {
return m_Data.data(); }
8521 void Add(
char ch) { m_Data.push_back(ch); }
8522 void Add(
const char* pStr);
8523 void AddNewLine() { Add(
'\n'); }
8524 void AddNumber(uint32_t num);
8525 void AddNumber(uint64_t num);
8526 void AddPointer(
const void* ptr);
8529 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends the NUL-terminated string pStr to the growable character buffer
// m_Data (resize + memcpy; no terminator byte is stored in m_Data itself).
// NOTE(review): the stray numeric tokens and missing braces throughout this
// chunk are artifacts of the source dump, not of the code — confirm against
// the pristine header before building.
8532 void VmaStringBuilder::Add(
const char* pStr)
8534 const size_t strLen = strlen(pStr);
// Grow by exactly strLen and copy the payload after the old contents.
8537 const size_t oldCount = m_Data.size();
8538 m_Data.resize(oldCount + strLen);
8539 memcpy(m_Data.data() + oldCount, pStr, strLen);
8543 void VmaStringBuilder::AddNumber(uint32_t num)
8550 *--p =
'0' + (num % 10);
8557 void VmaStringBuilder::AddNumber(uint64_t num)
8564 *--p =
'0' + (num % 10);
8571 void VmaStringBuilder::AddPointer(
const void* ptr)
8574 VmaPtrToStr(buf,
sizeof(buf), ptr);
8583 #if VMA_STATS_STRING_ENABLED
8587 VMA_CLASS_NO_COPY(VmaJsonWriter)
8589 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
8592 void BeginObject(
bool singleLine =
false);
8595 void BeginArray(
bool singleLine =
false);
8598 void WriteString(
const char* pStr);
8599 void BeginString(
const char* pStr = VMA_NULL);
8600 void ContinueString(
const char* pStr);
8601 void ContinueString(uint32_t n);
8602 void ContinueString(uint64_t n);
8603 void ContinueString_Pointer(
const void* ptr);
8604 void EndString(
const char* pStr = VMA_NULL);
8606 void WriteNumber(uint32_t n);
8607 void WriteNumber(uint64_t n);
8608 void WriteBool(
bool b);
8612 static const char*
const INDENT;
8614 enum COLLECTION_TYPE
8616 COLLECTION_TYPE_OBJECT,
8617 COLLECTION_TYPE_ARRAY,
8621 COLLECTION_TYPE type;
8622 uint32_t valueCount;
8623 bool singleLineMode;
8626 VmaStringBuilder& m_SB;
8627 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
8628 bool m_InsideString;
8630 void BeginValue(
bool isString);
8631 void WriteIndent(
bool oneLess =
false);
8634 const char*
const VmaJsonWriter::INDENT =
" ";
8636 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
8638 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
8639 m_InsideString(false)
8643 VmaJsonWriter::~VmaJsonWriter()
8645 VMA_ASSERT(!m_InsideString);
8646 VMA_ASSERT(m_Stack.empty());
8649 void VmaJsonWriter::BeginObject(
bool singleLine)
8651 VMA_ASSERT(!m_InsideString);
8657 item.type = COLLECTION_TYPE_OBJECT;
8658 item.valueCount = 0;
8659 item.singleLineMode = singleLine;
8660 m_Stack.push_back(item);
8663 void VmaJsonWriter::EndObject()
8665 VMA_ASSERT(!m_InsideString);
8670 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
8674 void VmaJsonWriter::BeginArray(
bool singleLine)
8676 VMA_ASSERT(!m_InsideString);
8682 item.type = COLLECTION_TYPE_ARRAY;
8683 item.valueCount = 0;
8684 item.singleLineMode = singleLine;
8685 m_Stack.push_back(item);
8688 void VmaJsonWriter::EndArray()
8690 VMA_ASSERT(!m_InsideString);
8695 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
8699 void VmaJsonWriter::WriteString(
const char* pStr)
8705 void VmaJsonWriter::BeginString(
const char* pStr)
8707 VMA_ASSERT(!m_InsideString);
8711 m_InsideString =
true;
8712 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8714 ContinueString(pStr);
8718 void VmaJsonWriter::ContinueString(
const char* pStr)
8720 VMA_ASSERT(m_InsideString);
8722 const size_t strLen = strlen(pStr);
8723 for(
size_t i = 0; i < strLen; ++i)
8756 VMA_ASSERT(0 &&
"Character not currently supported.");
8762 void VmaJsonWriter::ContinueString(uint32_t n)
8764 VMA_ASSERT(m_InsideString);
8768 void VmaJsonWriter::ContinueString(uint64_t n)
8770 VMA_ASSERT(m_InsideString);
8774 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
8776 VMA_ASSERT(m_InsideString);
8777 m_SB.AddPointer(ptr);
8780 void VmaJsonWriter::EndString(
const char* pStr)
8782 VMA_ASSERT(m_InsideString);
8783 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8785 ContinueString(pStr);
8788 m_InsideString =
false;
8791 void VmaJsonWriter::WriteNumber(uint32_t n)
8793 VMA_ASSERT(!m_InsideString);
8798 void VmaJsonWriter::WriteNumber(uint64_t n)
8800 VMA_ASSERT(!m_InsideString);
8805 void VmaJsonWriter::WriteBool(
bool b)
8807 VMA_ASSERT(!m_InsideString);
8809 m_SB.Add(b ?
"true" :
"false");
8812 void VmaJsonWriter::WriteNull()
8814 VMA_ASSERT(!m_InsideString);
8819 void VmaJsonWriter::BeginValue(
bool isString)
8821 if(!m_Stack.empty())
8823 StackItem& currItem = m_Stack.back();
8824 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8825 currItem.valueCount % 2 == 0)
8827 VMA_ASSERT(isString);
8830 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8831 currItem.valueCount % 2 != 0)
8835 else if(currItem.valueCount > 0)
8844 ++currItem.valueCount;
8848 void VmaJsonWriter::WriteIndent(
bool oneLess)
8850 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
8854 size_t count = m_Stack.size();
8855 if(count > 0 && oneLess)
8859 for(
size_t i = 0; i < count; ++i)
8870 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
8872 if(IsUserDataString())
8874 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
8876 FreeUserDataString(hAllocator);
8878 if(pUserData != VMA_NULL)
8880 m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (
const char*)pUserData);
8885 m_pUserData = pUserData;
8889 void VmaAllocation_T::ChangeBlockAllocation(
8891 VmaDeviceMemoryBlock* block,
8892 VkDeviceSize offset)
8894 VMA_ASSERT(block != VMA_NULL);
8895 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8898 if(block != m_BlockAllocation.m_Block)
8900 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
8901 if(IsPersistentMap())
8903 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
8904 block->Map(hAllocator, mapRefCount, VMA_NULL);
8907 m_BlockAllocation.m_Block = block;
8908 m_BlockAllocation.m_Offset = offset;
// Moves this allocation to a new byte offset within its current memory block.
// Only meaningful for block suballocations (asserted); dedicated allocations
// have no offset to change.
8911 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
8913 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8914 m_BlockAllocation.m_Offset = newOffset;
8917 VkDeviceSize VmaAllocation_T::GetOffset()
const
8921 case ALLOCATION_TYPE_BLOCK:
8922 return m_BlockAllocation.m_Offset;
8923 case ALLOCATION_TYPE_DEDICATED:
8931 VkDeviceMemory VmaAllocation_T::GetMemory()
const
8935 case ALLOCATION_TYPE_BLOCK:
8936 return m_BlockAllocation.m_Block->GetDeviceMemory();
8937 case ALLOCATION_TYPE_DEDICATED:
8938 return m_DedicatedAllocation.m_hMemory;
8941 return VK_NULL_HANDLE;
8945 void* VmaAllocation_T::GetMappedData()
const
8949 case ALLOCATION_TYPE_BLOCK:
8952 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
8953 VMA_ASSERT(pBlockData != VMA_NULL);
8954 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
8961 case ALLOCATION_TYPE_DEDICATED:
8962 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
8963 return m_DedicatedAllocation.m_pMappedData;
8970 bool VmaAllocation_T::CanBecomeLost()
const
8974 case ALLOCATION_TYPE_BLOCK:
8975 return m_BlockAllocation.m_CanBecomeLost;
8976 case ALLOCATION_TYPE_DEDICATED:
8984 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8986 VMA_ASSERT(CanBecomeLost());
8992 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
8995 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
9000 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
9006 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
9016 #if VMA_STATS_STRING_ENABLED
9019 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
9028 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const
9030 json.WriteString(
"Type");
9031 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
9033 json.WriteString(
"Size");
9034 json.WriteNumber(m_Size);
9036 if(m_pUserData != VMA_NULL)
9038 json.WriteString(
"UserData");
9039 if(IsUserDataString())
9041 json.WriteString((
const char*)m_pUserData);
9046 json.ContinueString_Pointer(m_pUserData);
9051 json.WriteString(
"CreationFrameIndex");
9052 json.WriteNumber(m_CreationFrameIndex);
9054 json.WriteString(
"LastUseFrameIndex");
9055 json.WriteNumber(GetLastUseFrameIndex());
9057 if(m_BufferImageUsage != 0)
9059 json.WriteString(
"Usage");
9060 json.WriteNumber(m_BufferImageUsage);
9066 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
9068 VMA_ASSERT(IsUserDataString());
9069 VmaFreeString(hAllocator->GetAllocationCallbacks(), (
char*)m_pUserData);
9070 m_pUserData = VMA_NULL;
// Increments the map reference count for a block-suballocated allocation.
// The count lives in the low bits of m_MapCount; MAP_COUNT_FLAG_PERSISTENT_MAP
// is masked out before the overflow check (hard cap of 0x7F simultaneous maps).
// Asserts if the cap would be exceeded. The actual ++ appears to be on a line
// dropped by the extraction — TODO confirm against the original header.
9073 void VmaAllocation_T::BlockAllocMap()
9075 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
9077 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
9083 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the map reference count for a block-suballocated allocation,
// ignoring the persistent-map flag bit. Asserts when called on an allocation
// whose non-persistent map count is already zero (unbalanced unmap).
9087 void VmaAllocation_T::BlockAllocUnmap()
9089 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
9091 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
9097 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
9101 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
9103 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
9107 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
9109 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
9110 *ppData = m_DedicatedAllocation.m_pMappedData;
9116 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
9117 return VK_ERROR_MEMORY_MAP_FAILED;
9122 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9123 hAllocator->m_hDevice,
9124 m_DedicatedAllocation.m_hMemory,
9129 if(result == VK_SUCCESS)
9131 m_DedicatedAllocation.m_pMappedData = *ppData;
9138 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
9140 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
9142 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
9147 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
9148 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
9149 hAllocator->m_hDevice,
9150 m_DedicatedAllocation.m_hMemory);
9155 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
9159 #if VMA_STATS_STRING_ENABLED
9161 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
9165 json.WriteString(
"Blocks");
9168 json.WriteString(
"Allocations");
9171 json.WriteString(
"UnusedRanges");
9174 json.WriteString(
"UsedBytes");
9177 json.WriteString(
"UnusedBytes");
9182 json.WriteString(
"AllocationSize");
9183 json.BeginObject(
true);
9184 json.WriteString(
"Min");
9186 json.WriteString(
"Avg");
9188 json.WriteString(
"Max");
9195 json.WriteString(
"UnusedRangeSize");
9196 json.BeginObject(
true);
9197 json.WriteString(
"Min");
9199 json.WriteString(
"Avg");
9201 json.WriteString(
"Max");
9211 struct VmaSuballocationItemSizeLess
9214 const VmaSuballocationList::iterator lhs,
9215 const VmaSuballocationList::iterator rhs)
const
9217 return lhs->size < rhs->size;
9220 const VmaSuballocationList::iterator lhs,
9221 VkDeviceSize rhsSize)
const
9223 return lhs->size < rhsSize;
9231 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
9233 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
9237 #if VMA_STATS_STRING_ENABLED
9239 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
9240 VkDeviceSize unusedBytes,
9241 size_t allocationCount,
9242 size_t unusedRangeCount)
const
9246 json.WriteString(
"TotalBytes");
9247 json.WriteNumber(GetSize());
9249 json.WriteString(
"UnusedBytes");
9250 json.WriteNumber(unusedBytes);
9252 json.WriteString(
"Allocations");
9253 json.WriteNumber((uint64_t)allocationCount);
9255 json.WriteString(
"UnusedRanges");
9256 json.WriteNumber((uint64_t)unusedRangeCount);
9258 json.WriteString(
"Suballocations");
9262 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
9263 VkDeviceSize offset,
9266 json.BeginObject(
true);
9268 json.WriteString(
"Offset");
9269 json.WriteNumber(offset);
9271 hAllocation->PrintParameters(json);
9276 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
9277 VkDeviceSize offset,
9278 VkDeviceSize size)
const
9280 json.BeginObject(
true);
9282 json.WriteString(
"Offset");
9283 json.WriteNumber(offset);
9285 json.WriteString(
"Type");
9286 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
9288 json.WriteString(
"Size");
9289 json.WriteNumber(size);
9294 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const
9305 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
9306 VmaBlockMetadata(hAllocator),
9309 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9310 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
9314 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initializes the metadata for a fresh memory block of the given size:
// the whole block becomes one free suballocation (offset 0, full size,
// no VmaAllocation handle), which is then registered in the size-sorted
// free list m_FreeSuballocationsBySize.
9318 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
9320 VmaBlockMetadata::Init(size);
9323 m_SumFreeSize = size;
9325 VmaSuballocation suballoc = {};
9326 suballoc.offset = 0;
9327 suballoc.size = size;
9328 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9329 suballoc.hAllocation = VK_NULL_HANDLE;
// Blocks smaller than the registration threshold are not expected here.
9331 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9332 m_Suballocations.push_back(suballoc);
9333 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
// NOTE(review): presumably a --suballocItem on a dropped line makes this
// point at the element just pushed — confirm against the original header.
9335 m_FreeSuballocationsBySize.push_back(suballocItem);
9338 bool VmaBlockMetadata_Generic::Validate()
const
9340 VMA_VALIDATE(!m_Suballocations.empty());
9343 VkDeviceSize calculatedOffset = 0;
9345 uint32_t calculatedFreeCount = 0;
9347 VkDeviceSize calculatedSumFreeSize = 0;
9350 size_t freeSuballocationsToRegister = 0;
9352 bool prevFree =
false;
9354 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9355 suballocItem != m_Suballocations.cend();
9358 const VmaSuballocation& subAlloc = *suballocItem;
9361 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
9363 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
9365 VMA_VALIDATE(!prevFree || !currFree);
9367 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
9371 calculatedSumFreeSize += subAlloc.size;
9372 ++calculatedFreeCount;
9373 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9375 ++freeSuballocationsToRegister;
9379 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
9383 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
9384 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
9387 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
9390 calculatedOffset += subAlloc.size;
9391 prevFree = currFree;
9396 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
9398 VkDeviceSize lastSize = 0;
9399 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
9401 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
9404 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
9406 VMA_VALIDATE(suballocItem->size >= lastSize);
9408 lastSize = suballocItem->size;
9412 VMA_VALIDATE(ValidateFreeSuballocationList());
9413 VMA_VALIDATE(calculatedOffset == GetSize());
9414 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
9415 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Returns the size of the largest free range in this block.
// m_FreeSuballocationsBySize is kept sorted ascending by size, so the last
// registered element is the maximum.
9420 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const
9422 if(!m_FreeSuballocationsBySize.empty())
9424 return m_FreeSuballocationsBySize.back()->size;
// A block is empty exactly when it holds a single suballocation and that
// one suballocation is free (free count == 1).
9432 bool VmaBlockMetadata_Generic::IsEmpty()
const
9434 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
9437 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
9441 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9453 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9454 suballocItem != m_Suballocations.cend();
9457 const VmaSuballocation& suballoc = *suballocItem;
9458 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9471 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const
9473 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9475 inoutStats.
size += GetSize();
9482 #if VMA_STATS_STRING_ENABLED
9484 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const
9486 PrintDetailedMap_Begin(json,
9488 m_Suballocations.size() - (
size_t)m_FreeCount,
9492 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9493 suballocItem != m_Suballocations.cend();
9494 ++suballocItem, ++i)
9496 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9498 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
9502 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
9506 PrintDetailedMap_End(json);
9511 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
9512 uint32_t currentFrameIndex,
9513 uint32_t frameInUseCount,
9514 VkDeviceSize bufferImageGranularity,
9515 VkDeviceSize allocSize,
9516 VkDeviceSize allocAlignment,
9518 VmaSuballocationType allocType,
9519 bool canMakeOtherLost,
9521 VmaAllocationRequest* pAllocationRequest)
9523 VMA_ASSERT(allocSize > 0);
9524 VMA_ASSERT(!upperAddress);
9525 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9526 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9527 VMA_HEAVY_ASSERT(Validate());
9529 pAllocationRequest->type = VmaAllocationRequestType::Normal;
9532 if(canMakeOtherLost ==
false &&
9533 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
9539 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
9540 if(freeSuballocCount > 0)
9545 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
9546 m_FreeSuballocationsBySize.data(),
9547 m_FreeSuballocationsBySize.data() + freeSuballocCount,
9548 allocSize + 2 * VMA_DEBUG_MARGIN,
9549 VmaSuballocationItemSizeLess());
9550 size_t index = it - m_FreeSuballocationsBySize.data();
9551 for(; index < freeSuballocCount; ++index)
9556 bufferImageGranularity,
9560 m_FreeSuballocationsBySize[index],
9562 &pAllocationRequest->offset,
9563 &pAllocationRequest->itemsToMakeLostCount,
9564 &pAllocationRequest->sumFreeSize,
9565 &pAllocationRequest->sumItemSize))
9567 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
9572 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
9574 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9575 it != m_Suballocations.end();
9578 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
9581 bufferImageGranularity,
9587 &pAllocationRequest->offset,
9588 &pAllocationRequest->itemsToMakeLostCount,
9589 &pAllocationRequest->sumFreeSize,
9590 &pAllocationRequest->sumItemSize))
9592 pAllocationRequest->item = it;
9600 for(
size_t index = freeSuballocCount; index--; )
9605 bufferImageGranularity,
9609 m_FreeSuballocationsBySize[index],
9611 &pAllocationRequest->offset,
9612 &pAllocationRequest->itemsToMakeLostCount,
9613 &pAllocationRequest->sumFreeSize,
9614 &pAllocationRequest->sumItemSize))
9616 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
9623 if(canMakeOtherLost)
9628 VmaAllocationRequest tmpAllocRequest = {};
9629 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
9630 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
9631 suballocIt != m_Suballocations.end();
9634 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
9635 suballocIt->hAllocation->CanBecomeLost())
9640 bufferImageGranularity,
9646 &tmpAllocRequest.offset,
9647 &tmpAllocRequest.itemsToMakeLostCount,
9648 &tmpAllocRequest.sumFreeSize,
9649 &tmpAllocRequest.sumItemSize))
9653 *pAllocationRequest = tmpAllocRequest;
9654 pAllocationRequest->item = suballocIt;
9657 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
9659 *pAllocationRequest = tmpAllocRequest;
9660 pAllocationRequest->item = suballocIt;
9673 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
9674 uint32_t currentFrameIndex,
9675 uint32_t frameInUseCount,
9676 VmaAllocationRequest* pAllocationRequest)
9678 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
9680 while(pAllocationRequest->itemsToMakeLostCount > 0)
9682 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
9684 ++pAllocationRequest->item;
9686 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9687 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
9688 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
9689 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9691 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
9692 --pAllocationRequest->itemsToMakeLostCount;
9700 VMA_HEAVY_ASSERT(Validate());
9701 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9702 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Walks all suballocations and marks as "lost" every non-free allocation
// that can become lost and whose MakeLost() succeeds for the given frame
// index / frame-in-use window. Freed slots are returned to the free list via
// FreeSuballocation (which also merges neighbors). Returns how many
// allocations were lost.
9707 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9709 uint32_t lostAllocationCount = 0;
9710 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9711 it != m_Suballocations.end();
9714 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
9715 it->hAllocation->CanBecomeLost() &&
9716 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge with neighbors; continue from the iterator
// it hands back.
9718 it = FreeSuballocation(it);
9719 ++lostAllocationCount;
9722 return lostAllocationCount;
// Validates the magic guard values written into the VMA_DEBUG_MARGIN bytes
// immediately before and after every live (non-free) suballocation in the
// mapped block memory pBlockData. Returns VK_ERROR_VALIDATION_FAILED_EXT on
// the first corrupted guard; asserts in debug builds.
9725 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
9727 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9728 it != m_Suballocations.end();
9731 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard region preceding the allocation.
9733 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
9735 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9736 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard region following the allocation.
9738 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
9740 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9741 return VK_ERROR_VALIDATION_FAILED_EXT;
9749 void VmaBlockMetadata_Generic::Alloc(
9750 const VmaAllocationRequest& request,
9751 VmaSuballocationType type,
9752 VkDeviceSize allocSize,
9755 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
9756 VMA_ASSERT(request.item != m_Suballocations.end());
9757 VmaSuballocation& suballoc = *request.item;
9759 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9761 VMA_ASSERT(request.offset >= suballoc.offset);
9762 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
9763 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
9764 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
9768 UnregisterFreeSuballocation(request.item);
9770 suballoc.offset = request.offset;
9771 suballoc.size = allocSize;
9772 suballoc.type = type;
9773 suballoc.hAllocation = hAllocation;
9778 VmaSuballocation paddingSuballoc = {};
9779 paddingSuballoc.offset = request.offset + allocSize;
9780 paddingSuballoc.size = paddingEnd;
9781 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9782 VmaSuballocationList::iterator next = request.item;
9784 const VmaSuballocationList::iterator paddingEndItem =
9785 m_Suballocations.insert(next, paddingSuballoc);
9786 RegisterFreeSuballocation(paddingEndItem);
9792 VmaSuballocation paddingSuballoc = {};
9793 paddingSuballoc.offset = request.offset - paddingBegin;
9794 paddingSuballoc.size = paddingBegin;
9795 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9796 const VmaSuballocationList::iterator paddingBeginItem =
9797 m_Suballocations.insert(request.item, paddingSuballoc);
9798 RegisterFreeSuballocation(paddingBeginItem);
9802 m_FreeCount = m_FreeCount - 1;
9803 if(paddingBegin > 0)
9811 m_SumFreeSize -= allocSize;
9814 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
9816 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9817 suballocItem != m_Suballocations.end();
9820 VmaSuballocation& suballoc = *suballocItem;
9821 if(suballoc.hAllocation == allocation)
9823 FreeSuballocation(suballocItem);
9824 VMA_HEAVY_ASSERT(Validate());
9828 VMA_ASSERT(0 &&
"Not found!");
9831 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
9833 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9834 suballocItem != m_Suballocations.end();
9837 VmaSuballocation& suballoc = *suballocItem;
9838 if(suballoc.offset == offset)
9840 FreeSuballocation(suballocItem);
9844 VMA_ASSERT(0 &&
"Not found!");
// Debug invariant check for m_FreeSuballocationsBySize: every entry must be
// a FREE suballocation, at least the registration threshold in size, and the
// vector must be sorted ascending by size (lastSize tracks the running max).
// VMA_VALIDATE returns false from this function on the first violation.
9847 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const
9849 VkDeviceSize lastSize = 0;
9850 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
9852 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
9854 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
9855 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9856 VMA_VALIDATE(it->size >= lastSize);
9857 lastSize = it->size;
9862 bool VmaBlockMetadata_Generic::CheckAllocation(
9863 uint32_t currentFrameIndex,
9864 uint32_t frameInUseCount,
9865 VkDeviceSize bufferImageGranularity,
9866 VkDeviceSize allocSize,
9867 VkDeviceSize allocAlignment,
9868 VmaSuballocationType allocType,
9869 VmaSuballocationList::const_iterator suballocItem,
9870 bool canMakeOtherLost,
9871 VkDeviceSize* pOffset,
9872 size_t* itemsToMakeLostCount,
9873 VkDeviceSize* pSumFreeSize,
9874 VkDeviceSize* pSumItemSize)
const
9876 VMA_ASSERT(allocSize > 0);
9877 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9878 VMA_ASSERT(suballocItem != m_Suballocations.cend());
9879 VMA_ASSERT(pOffset != VMA_NULL);
9881 *itemsToMakeLostCount = 0;
9885 if(canMakeOtherLost)
9887 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9889 *pSumFreeSize = suballocItem->size;
9893 if(suballocItem->hAllocation->CanBecomeLost() &&
9894 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9896 ++*itemsToMakeLostCount;
9897 *pSumItemSize = suballocItem->size;
9906 if(GetSize() - suballocItem->offset < allocSize)
9912 *pOffset = suballocItem->offset;
9915 if(VMA_DEBUG_MARGIN > 0)
9917 *pOffset += VMA_DEBUG_MARGIN;
9921 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
9925 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
9927 bool bufferImageGranularityConflict =
false;
9928 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
9929 while(prevSuballocItem != m_Suballocations.cbegin())
9932 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
9933 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
9935 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9937 bufferImageGranularityConflict =
true;
9945 if(bufferImageGranularityConflict)
9947 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
9953 if(*pOffset >= suballocItem->offset + suballocItem->size)
9959 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
9962 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
9964 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
9966 if(suballocItem->offset + totalSize > GetSize())
9973 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
9974 if(totalSize > suballocItem->size)
9976 VkDeviceSize remainingSize = totalSize - suballocItem->size;
9977 while(remainingSize > 0)
9980 if(lastSuballocItem == m_Suballocations.cend())
9984 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9986 *pSumFreeSize += lastSuballocItem->size;
9990 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
9991 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
9992 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9994 ++*itemsToMakeLostCount;
9995 *pSumItemSize += lastSuballocItem->size;
10002 remainingSize = (lastSuballocItem->size < remainingSize) ?
10003 remainingSize - lastSuballocItem->size : 0;
10009 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
10011 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
10012 ++nextSuballocItem;
10013 while(nextSuballocItem != m_Suballocations.cend())
10015 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
10016 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10018 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10020 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
10021 if(nextSuballoc.hAllocation->CanBecomeLost() &&
10022 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10024 ++*itemsToMakeLostCount;
10037 ++nextSuballocItem;
10043 const VmaSuballocation& suballoc = *suballocItem;
10044 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10046 *pSumFreeSize = suballoc.size;
10049 if(suballoc.size < allocSize)
10055 *pOffset = suballoc.offset;
10058 if(VMA_DEBUG_MARGIN > 0)
10060 *pOffset += VMA_DEBUG_MARGIN;
10064 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
10068 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
10070 bool bufferImageGranularityConflict =
false;
10071 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
10072 while(prevSuballocItem != m_Suballocations.cbegin())
10074 --prevSuballocItem;
10075 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
10076 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
10078 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10080 bufferImageGranularityConflict =
true;
10088 if(bufferImageGranularityConflict)
10090 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
10095 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
10098 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
10101 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
10108 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
10110 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
10111 ++nextSuballocItem;
10112 while(nextSuballocItem != m_Suballocations.cend())
10114 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
10115 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10117 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10127 ++nextSuballocItem;
// Coalesces the free suballocation 'item' with the free suballocation that
// immediately follows it: item absorbs the neighbor's size and the neighbor
// node is erased from the list. Both must be FREE (asserted).
// NOTE(review): the ++nextItem advancing past 'item' sits on a line dropped
// by the extraction — confirm against the original header.
10136 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
10138 VMA_ASSERT(item != m_Suballocations.end());
10139 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10141 VmaSuballocationList::iterator nextItem = item;
10143 VMA_ASSERT(nextItem != m_Suballocations.end());
10144 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
10146 item->size += nextItem->size;
10148 m_Suballocations.erase(nextItem);
10151 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
10154 VmaSuballocation& suballoc = *suballocItem;
10155 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10156 suballoc.hAllocation = VK_NULL_HANDLE;
10160 m_SumFreeSize += suballoc.size;
10163 bool mergeWithNext =
false;
10164 bool mergeWithPrev =
false;
10166 VmaSuballocationList::iterator nextItem = suballocItem;
10168 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
10170 mergeWithNext =
true;
10173 VmaSuballocationList::iterator prevItem = suballocItem;
10174 if(suballocItem != m_Suballocations.begin())
10177 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
10179 mergeWithPrev =
true;
10185 UnregisterFreeSuballocation(nextItem);
10186 MergeFreeWithNext(suballocItem);
10191 UnregisterFreeSuballocation(prevItem);
10192 MergeFreeWithNext(prevItem);
10193 RegisterFreeSuballocation(prevItem);
10198 RegisterFreeSuballocation(suballocItem);
10199 return suballocItem;
// Registers a FREE suballocation in the size-sorted vector
// m_FreeSuballocationsBySize. Ranges below
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are intentionally not tracked.
// First element is pushed directly; subsequent ones are inserted in sorted
// position via VmaVectorInsertSorted to preserve the ascending-size order
// that ValidateFreeSuballocationList() checks.
10203 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
10205 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10206 VMA_ASSERT(item->size > 0);
10210 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
10212 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
10214 if(m_FreeSuballocationsBySize.empty())
10216 m_FreeSuballocationsBySize.push_back(item);
10220 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
10228 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
10230 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10231 VMA_ASSERT(item->size > 0);
10235 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
10237 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
10239 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
10240 m_FreeSuballocationsBySize.data(),
10241 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
10243 VmaSuballocationItemSizeLess());
10244 for(
size_t index = it - m_FreeSuballocationsBySize.data();
10245 index < m_FreeSuballocationsBySize.size();
10248 if(m_FreeSuballocationsBySize[index] == item)
10250 VmaVectorRemove(m_FreeSuballocationsBySize, index);
10253 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
10255 VMA_ASSERT(0 &&
"Not found.");
// Heuristic: returns whether a buffer/image granularity conflict could occur
// in this block. Scans all used suballocations, tracking the minimum
// allocation alignment seen and whether two adjacent suballocation types
// conflict under the granularity rules. `inOutPrevSuballocType` carries the
// last used type across calls/blocks.
// NOTE(review): the early-exit body after the `bufferImageGranularity == 1 ||
// IsEmpty()` check is missing from this extraction — confirm the value it
// returns against upstream.
10261 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
10262 VkDeviceSize bufferImageGranularity,
10263 VmaSuballocationType& inOutPrevSuballocType)
const
// Granularity of 1 can never conflict; an empty block has nothing to check.
10265 if(bufferImageGranularity == 1 || IsEmpty())
10270 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
10271 bool typeConflictFound =
false;
10272 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
10273 it != m_Suballocations.cend();
// Only used (non-free) suballocations participate in the scan.
10276 const VmaSuballocationType suballocType = it->type;
10277 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
10279 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
10280 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
10282 typeConflictFound =
true;
10284 inOutPrevSuballocType = suballocType;
// Conflict is possible if conflicting types were seen, or if some allocation
// is aligned more weakly than the granularity itself.
10288 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Constructor: both suballocation vectors use the allocator's allocation
// callbacks; the block starts with vector 0 as the "1st" vector, no second
// vector in use, and no null (freed-in-place) items tracked.
// NOTE(review): original line 10296 is missing from this extraction —
// presumably an initializer such as m_SumFreeSize(0); confirm upstream.
10294 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
10295 VmaBlockMetadata(hAllocator),
10297 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10298 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10299 m_1stVectorIndex(0),
10300 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
10301 m_1stNullItemsBeginCount(0),
10302 m_1stNullItemsMiddleCount(0),
10303 m_2ndNullItemsCount(0)
10307 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
10311 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
10313 VmaBlockMetadata::Init(size);
10314 m_SumFreeSize = size;
// Consistency check for the linear metadata. Each VMA_VALIDATE returns false
// from this function on the first violated invariant. Verifies the null-item
// counters, per-suballocation offset/size agreement with the owning
// VmaAllocation, monotonically increasing offsets with VMA_DEBUG_MARGIN
// spacing, and that m_SumFreeSize matches block size minus used bytes.
10317 bool VmaBlockMetadata_Linear::Validate()
const
10319 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10320 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is empty exactly when its mode says so; a ring buffer cannot
// have a non-empty 2nd vector with an empty 1st.
10322 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
10323 VMA_VALIDATE(!suballocations1st.empty() ||
10324 suballocations2nd.empty() ||
10325 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
// First non-null item and last item of the 1st vector must be live.
10327 if(!suballocations1st.empty())
10330 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
10332 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
10334 if(!suballocations2nd.empty())
10337 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
// Null-item counters can never exceed the vectors they describe.
10340 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
10341 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
10343 VkDeviceSize sumUsedSize = 0;
10344 const size_t suballoc1stCount = suballocations1st.size();
10345 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies the low end of the block, so walk it
// first, forward.
10347 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10349 const size_t suballoc2ndCount = suballocations2nd.size();
10350 size_t nullItem2ndCount = 0;
10351 for(
size_t i = 0; i < suballoc2ndCount; ++i)
10353 const VmaSuballocation& suballoc = suballocations2nd[i];
10354 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10356 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10357 VMA_VALIDATE(suballoc.offset >= offset);
// Used items must agree with their allocation's recorded offset/size.
10361 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10362 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10363 sumUsedSize += suballoc.size;
10367 ++nullItem2ndCount;
10370 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10373 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must be fully free.
10376 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
10378 const VmaSuballocation& suballoc = suballocations1st[i];
10379 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
10380 suballoc.hAllocation == VK_NULL_HANDLE);
10383 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Walk the live region of the 1st vector.
10385 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
10387 const VmaSuballocation& suballoc = suballocations1st[i];
10388 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10390 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10391 VMA_VALIDATE(suballoc.offset >= offset);
10392 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
10396 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10397 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10398 sumUsedSize += suballoc.size;
10402 ++nullItem1stCount;
10405 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10407 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows downward from the high end, so iterate
// it in reverse to keep offsets increasing.
10409 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10411 const size_t suballoc2ndCount = suballocations2nd.size();
10412 size_t nullItem2ndCount = 0;
10413 for(
size_t i = suballoc2ndCount; i--; )
10415 const VmaSuballocation& suballoc = suballocations2nd[i];
10416 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10418 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10419 VMA_VALIDATE(suballoc.offset >= offset);
10423 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10424 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10425 sumUsedSize += suballoc.size;
10429 ++nullItem2ndCount;
10432 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10435 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Final global invariants: nothing past the end; free bytes add up.
10438 VMA_VALIDATE(offset <= GetSize());
10439 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
10444 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const
10446 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
10447 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous free range, depending on the
// second-vector mode. NOTE(review): this extraction is missing lines around
// orig 10478 (the expression combining lines 10479-10480, presumably a
// VMA_MAX(...) over the gap before the first and after the last item), the
// early-return for an empty block, and the default case — confirm upstream.
10450 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const
10452 const VkDeviceSize size = GetSize();
10464 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10466 switch(m_2ndVectorMode)
// Only the 1st vector in use: free space is before the first live item or
// after the last one.
10468 case SECOND_VECTOR_EMPTY:
10474 const size_t suballocations1stCount = suballocations1st.size();
10475 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
10476 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10477 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
10479 firstSuballoc.offset,
10480 size - (lastSuballoc.offset + lastSuballoc.size));
// Ring buffer: the only gap is between the end of the 2nd vector (low end)
// and the first live item of the 1st vector.
10484 case SECOND_VECTOR_RING_BUFFER:
10489 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10490 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
10491 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
10492 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// Double stack: the gap is between the top of the 1st (bottom) stack and the
// top of the 2nd (top) stack.
10496 case SECOND_VECTOR_DOUBLE_STACK:
10501 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10502 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
10503 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
10504 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Accumulates per-block statistics into `outInfo` by walking the block's
// address space in offset order: (ring-buffer) 2nd vector first, then the
// 1st vector, then (double-stack) the 2nd vector in reverse. Gaps between
// live items are counted as unused ranges.
// NOTE(review): this extraction is missing the statistic-recording lines
// (orig 10521-10530 initialization and the per-range/per-allocation updates
// inside the branches) — only the traversal skeleton is visible here.
10514 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
10516 const VkDeviceSize size = GetSize();
10517 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10518 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10519 const size_t suballoc1stCount = suballocations1st.size();
10520 const size_t suballoc2ndCount = suballocations2nd.size();
10531 VkDeviceSize lastOffset = 0;
// Ring-buffer mode: 2nd vector occupies [0, start of 1st vector).
10533 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10535 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10536 size_t nextAlloc2ndIndex = 0;
10537 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) entries.
10540 while(nextAlloc2ndIndex < suballoc2ndCount &&
10541 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10543 ++nextAlloc2ndIndex;
10547 if(nextAlloc2ndIndex < suballoc2ndCount)
10549 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
10552 if(lastOffset < suballoc.offset)
10555 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10569 lastOffset = suballoc.offset + suballoc.size;
10570 ++nextAlloc2ndIndex;
// No more live items: remaining space up to the 1st vector is unused.
10576 if(lastOffset < freeSpace2ndTo1stEnd)
10578 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10586 lastOffset = freeSpace2ndTo1stEnd;
// Walk the 1st vector up to the block end (or to the bottom of the 2nd
// stack in double-stack mode).
10591 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10592 const VkDeviceSize freeSpace1stTo2ndEnd =
10593 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10594 while(lastOffset < freeSpace1stTo2ndEnd)
10597 while(nextAlloc1stIndex < suballoc1stCount &&
10598 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10600 ++nextAlloc1stIndex;
10604 if(nextAlloc1stIndex < suballoc1stCount)
10606 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10609 if(lastOffset < suballoc.offset)
10612 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10626 lastOffset = suballoc.offset + suballoc.size;
10627 ++nextAlloc1stIndex;
10633 if(lastOffset < freeSpace1stTo2ndEnd)
10635 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10643 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack mode: walk the 2nd vector from its top (highest index =
// lowest offset... iterated in reverse) down to the end of the block.
10647 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10649 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10650 while(lastOffset < size)
10653 while(nextAlloc2ndIndex != SIZE_MAX &&
10654 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10656 --nextAlloc2ndIndex;
10660 if(nextAlloc2ndIndex != SIZE_MAX)
10662 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10665 if(lastOffset < suballoc.offset)
10668 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10682 lastOffset = suballoc.offset + suballoc.size;
10683 --nextAlloc2ndIndex;
10689 if(lastOffset < size)
10691 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Adds this block's statistics to `inoutStats`, using the same three-phase
// offset-order walk as CalcAllocationStatInfo (ring-buffer 2nd vector, then
// 1st vector, then double-stack 2nd vector in reverse).
// NOTE(review): the lines that actually update unusedSize/allocationCount/
// unusedRangeCount etc. are missing from this extraction; only the traversal
// skeleton is visible.
10707 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const
10709 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10710 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10711 const VkDeviceSize size = GetSize();
10712 const size_t suballoc1stCount = suballocations1st.size();
10713 const size_t suballoc2ndCount = suballocations2nd.size();
10715 inoutStats.
size += size;
10717 VkDeviceSize lastOffset = 0;
10719 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10721 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): this index starts at m_1stNullItemsBeginCount, but the
// identical 2nd-vector walk in CalcAllocationStatInfo (orig 10536) starts at
// 0; m_1stNullItemsBeginCount belongs to the 1st vector, so this looks like
// a copy-paste slip — confirm against upstream.
10722 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
10723 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) entries.
10726 while(nextAlloc2ndIndex < suballoc2ndCount &&
10727 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10729 ++nextAlloc2ndIndex;
10733 if(nextAlloc2ndIndex < suballoc2ndCount)
10735 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation is an unused range.
10738 if(lastOffset < suballoc.offset)
10741 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10752 lastOffset = suballoc.offset + suballoc.size;
10753 ++nextAlloc2ndIndex;
10758 if(lastOffset < freeSpace2ndTo1stEnd)
10761 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10768 lastOffset = freeSpace2ndTo1stEnd;
// Walk the 1st vector up to block end, or to the bottom of the 2nd stack.
10773 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10774 const VkDeviceSize freeSpace1stTo2ndEnd =
10775 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10776 while(lastOffset < freeSpace1stTo2ndEnd)
10779 while(nextAlloc1stIndex < suballoc1stCount &&
10780 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10782 ++nextAlloc1stIndex;
10786 if(nextAlloc1stIndex < suballoc1stCount)
10788 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10791 if(lastOffset < suballoc.offset)
10794 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10805 lastOffset = suballoc.offset + suballoc.size;
10806 ++nextAlloc1stIndex;
10811 if(lastOffset < freeSpace1stTo2ndEnd)
10814 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10821 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack mode: walk the 2nd vector in reverse index order.
10825 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10827 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10828 while(lastOffset < size)
10831 while(nextAlloc2ndIndex != SIZE_MAX &&
10832 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10834 --nextAlloc2ndIndex;
10838 if(nextAlloc2ndIndex != SIZE_MAX)
10840 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10843 if(lastOffset < suballoc.offset)
10846 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10857 lastOffset = suballoc.offset + suballoc.size;
10858 --nextAlloc2ndIndex;
10863 if(lastOffset < size)
10866 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Writes a detailed JSON map of this block. Two passes over the same
// three-phase offset-order traversal: the first pass only counts allocations
// and unused ranges (needed by PrintDetailedMap_Begin), the second pass
// emits each range via PrintDetailedMap_Allocation / _UnusedRange.
#if VMA_STATS_STRING_ENABLED
10880 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const
10882 const VkDeviceSize size = GetSize();
10883 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10884 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10885 const size_t suballoc1stCount = suballocations1st.size();
10886 const size_t suballoc2ndCount = suballocations2nd.size();
// ---- Pass 1: count allocations, unused ranges and used bytes. ----
10890 size_t unusedRangeCount = 0;
10891 VkDeviceSize usedBytes = 0;
10893 VkDeviceSize lastOffset = 0;
10895 size_t alloc2ndCount = 0;
10896 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10898 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10899 size_t nextAlloc2ndIndex = 0;
10900 while(lastOffset < freeSpace2ndTo1stEnd)
10903 while(nextAlloc2ndIndex < suballoc2ndCount &&
10904 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10906 ++nextAlloc2ndIndex;
10910 if(nextAlloc2ndIndex < suballoc2ndCount)
10912 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10915 if(lastOffset < suballoc.offset)
10918 ++unusedRangeCount;
10924 usedBytes += suballoc.size;
10927 lastOffset = suballoc.offset + suballoc.size;
10928 ++nextAlloc2ndIndex;
10933 if(lastOffset < freeSpace2ndTo1stEnd)
10936 ++unusedRangeCount;
10940 lastOffset = freeSpace2ndTo1stEnd;
10945 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10946 size_t alloc1stCount = 0;
10947 const VkDeviceSize freeSpace1stTo2ndEnd =
10948 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10949 while(lastOffset < freeSpace1stTo2ndEnd)
10952 while(nextAlloc1stIndex < suballoc1stCount &&
10953 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10955 ++nextAlloc1stIndex;
10959 if(nextAlloc1stIndex < suballoc1stCount)
10961 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10964 if(lastOffset < suballoc.offset)
10967 ++unusedRangeCount;
10973 usedBytes += suballoc.size;
10976 lastOffset = suballoc.offset + suballoc.size;
10977 ++nextAlloc1stIndex;
// NOTE(review): this guard compares against `size`, while the advance below
// (orig 10989) and the equivalent guard in pass 2 (orig 11131) use
// freeSpace1stTo2ndEnd; in double-stack mode these differ — confirm which is
// intended against upstream.
10982 if(lastOffset < size)
10985 ++unusedRangeCount;
10989 lastOffset = freeSpace1stTo2ndEnd;
10993 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10995 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10996 while(lastOffset < size)
10999 while(nextAlloc2ndIndex != SIZE_MAX &&
11000 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11002 --nextAlloc2ndIndex;
11006 if(nextAlloc2ndIndex != SIZE_MAX)
11008 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11011 if(lastOffset < suballoc.offset)
11014 ++unusedRangeCount;
11020 usedBytes += suballoc.size;
11023 lastOffset = suballoc.offset + suballoc.size;
11024 --nextAlloc2ndIndex;
11029 if(lastOffset < size)
11032 ++unusedRangeCount;
// ---- Pass 2: emit the JSON using the counts gathered above. ----
11041 const VkDeviceSize unusedBytes = size - usedBytes;
11042 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
11047 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11049 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
11050 size_t nextAlloc2ndIndex = 0;
11051 while(lastOffset < freeSpace2ndTo1stEnd)
11054 while(nextAlloc2ndIndex < suballoc2ndCount &&
11055 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11057 ++nextAlloc2ndIndex;
11061 if(nextAlloc2ndIndex < suballoc2ndCount)
11063 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11066 if(lastOffset < suballoc.offset)
11069 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11070 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11075 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11078 lastOffset = suballoc.offset + suballoc.size;
11079 ++nextAlloc2ndIndex;
11084 if(lastOffset < freeSpace2ndTo1stEnd)
11087 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
11088 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11092 lastOffset = freeSpace2ndTo1stEnd;
11097 nextAlloc1stIndex = m_1stNullItemsBeginCount;
11098 while(lastOffset < freeSpace1stTo2ndEnd)
11101 while(nextAlloc1stIndex < suballoc1stCount &&
11102 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
11104 ++nextAlloc1stIndex;
11108 if(nextAlloc1stIndex < suballoc1stCount)
11110 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
11113 if(lastOffset < suballoc.offset)
11116 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11117 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11122 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11125 lastOffset = suballoc.offset + suballoc.size;
11126 ++nextAlloc1stIndex;
11131 if(lastOffset < freeSpace1stTo2ndEnd)
11134 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
11135 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11139 lastOffset = freeSpace1stTo2ndEnd;
11143 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11145 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
11146 while(lastOffset < size)
11149 while(nextAlloc2ndIndex != SIZE_MAX &&
11150 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11152 --nextAlloc2ndIndex;
11156 if(nextAlloc2ndIndex != SIZE_MAX)
11158 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11161 if(lastOffset < suballoc.offset)
11164 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11165 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11170 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11173 lastOffset = suballoc.offset + suballoc.size;
11174 --nextAlloc2ndIndex;
11179 if(lastOffset < size)
11182 const VkDeviceSize unusedRangeSize = size - lastOffset;
11183 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11192 PrintDetailedMap_End(json);
// Dispatcher: validates basic preconditions and forwards to the upper- or
// lower-address variant depending on `upperAddress`. Returns whether a
// suitable allocation request could be produced in *pAllocationRequest.
// NOTE(review): the parameter lines for `upperAddress` (orig 11202) and
// `strategy` (orig 11205) are missing from this extraction, though both are
// used in the body below — confirm the full signature upstream.
11196 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
11197 uint32_t currentFrameIndex,
11198 uint32_t frameInUseCount,
11199 VkDeviceSize bufferImageGranularity,
11200 VkDeviceSize allocSize,
11201 VkDeviceSize allocAlignment,
11203 VmaSuballocationType allocType,
11204 bool canMakeOtherLost,
11206 VmaAllocationRequest* pAllocationRequest)
11208 VMA_ASSERT(allocSize > 0);
11209 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
11210 VMA_ASSERT(pAllocationRequest != VMA_NULL);
11211 VMA_HEAVY_ASSERT(Validate());
11212 return upperAddress ?
11213 CreateAllocationRequest_UpperAddress(
11214 currentFrameIndex, frameInUseCount, bufferImageGranularity,
11215 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
11216 CreateAllocationRequest_LowerAddress(
11217 currentFrameIndex, frameInUseCount, bufferImageGranularity,
11218 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the upper end of the block (double-stack
// usage): computes a downward-growing offset below the last 2nd-vector item,
// aligns it down, resolves buffer/image granularity conflicts against both
// vectors, and fills *pAllocationRequest on success.
// NOTE(review): the parameter line for `strategy` (orig 11229), several early
// `return false` bodies, and the final `return true/false` lines are missing
// from this extraction.
11221 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
11222 uint32_t currentFrameIndex,
11223 uint32_t frameInUseCount,
11224 VkDeviceSize bufferImageGranularity,
11225 VkDeviceSize allocSize,
11226 VkDeviceSize allocAlignment,
11227 VmaSuballocationType allocType,
11228 bool canMakeOtherLost,
11230 VmaAllocationRequest* pAllocationRequest)
11232 const VkDeviceSize size = GetSize();
11233 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11234 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address allocation is incompatible with ring-buffer usage.
11236 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11238 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
// Request larger than the whole block can never fit.
11243 if(allocSize > size)
// Start just below the lowest existing 2nd-vector item (or the block end).
11247 VkDeviceSize resultBaseOffset = size - allocSize;
11248 if(!suballocations2nd.empty())
11250 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11251 resultBaseOffset = lastSuballoc.offset - allocSize;
11252 if(allocSize > lastSuballoc.offset)
11259 VkDeviceSize resultOffset = resultBaseOffset;
// Leave room for the debug margin below the allocation.
11262 if(VMA_DEBUG_MARGIN > 0)
11264 if(resultOffset < VMA_DEBUG_MARGIN)
11268 resultOffset -= VMA_DEBUG_MARGIN;
// Growing downward, so alignment rounds the offset down.
11272 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Check granularity conflicts with 2nd-vector neighbors above this offset.
11276 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
11278 bool bufferImageGranularityConflict =
false;
11279 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11281 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11282 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11284 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
11286 bufferImageGranularityConflict =
true;
11294 if(bufferImageGranularityConflict)
11296 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// There must be room between the end of the 1st vector and this offset.
11301 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
11302 suballocations1st.back().offset + suballocations1st.back().size :
11304 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Check granularity conflicts against 1st-vector items below this offset.
11308 if(bufferImageGranularity > 1)
11310 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11312 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11313 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11315 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: publish the request.
11329 pAllocationRequest->offset = resultOffset;
11330 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
11331 pAllocationRequest->sumItemSize = 0;
11333 pAllocationRequest->itemsToMakeLostCount = 0;
11334 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to place an allocation at the lower end of the free space. Two
// strategies: (1) append after the 1st vector when the 2nd vector is empty or
// a double stack; (2) append after the 2nd vector in ring-buffer mode (or
// start one), optionally making lost allocations from the 1st vector when
// `canMakeOtherLost` permits.
// NOTE(review): the `strategy` parameter line (orig 11349), several `return
// true/false` statements, and brace lines are missing from this extraction.
11341 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
11342 uint32_t currentFrameIndex,
11343 uint32_t frameInUseCount,
11344 VkDeviceSize bufferImageGranularity,
11345 VkDeviceSize allocSize,
11346 VkDeviceSize allocAlignment,
11347 VmaSuballocationType allocType,
11348 bool canMakeOtherLost,
11350 VmaAllocationRequest* pAllocationRequest)
11352 const VkDeviceSize size = GetSize();
11353 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11354 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Strategy 1: try to allocate at the end of the 1st vector.
11356 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11360 VkDeviceSize resultBaseOffset = 0;
11361 if(!suballocations1st.empty())
11363 const VmaSuballocation& lastSuballoc = suballocations1st.back();
11364 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11368 VkDeviceSize resultOffset = resultBaseOffset;
// Leave a debug margin before the allocation, then align up.
11371 if(VMA_DEBUG_MARGIN > 0)
11373 resultOffset += VMA_DEBUG_MARGIN;
11377 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflicts with preceding 1st-vector items.
11381 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty())
11383 bool bufferImageGranularityConflict =
false;
11384 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11386 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11387 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11389 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11391 bufferImageGranularityConflict =
true;
11399 if(bufferImageGranularityConflict)
11401 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the 2nd stack (double stack) or block end.
11405 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
11406 suballocations2nd.back().offset : size;
11409 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Also check conflicts against 2nd-stack items above the new allocation.
11413 if((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11415 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11417 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11418 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11420 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success at end of 1st vector.
11434 pAllocationRequest->offset = resultOffset;
11435 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
11436 pAllocationRequest->sumItemSize = 0;
11438 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
11439 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy 2: wrap around — allocate at the end of the 2nd vector (ring
// buffer), potentially making 1st-vector allocations lost.
11446 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11448 VMA_ASSERT(!suballocations1st.empty());
11450 VkDeviceSize resultBaseOffset = 0;
11451 if(!suballocations2nd.empty())
11453 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11454 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11458 VkDeviceSize resultOffset = resultBaseOffset;
11461 if(VMA_DEBUG_MARGIN > 0)
11463 resultOffset += VMA_DEBUG_MARGIN;
11467 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflicts with preceding 2nd-vector items.
11471 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
11473 bool bufferImageGranularityConflict =
false;
11474 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
11476 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
11477 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11479 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11481 bufferImageGranularityConflict =
true;
11489 if(bufferImageGranularityConflict)
11491 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
11495 pAllocationRequest->itemsToMakeLostCount = 0;
11496 pAllocationRequest->sumItemSize = 0;
11497 size_t index1st = m_1stNullItemsBeginCount;
// Count 1st-vector allocations that overlap the requested range and can be
// made lost (their last-use frame is old enough).
11499 if(canMakeOtherLost)
11501 while(index1st < suballocations1st.size() &&
11502 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
11505 const VmaSuballocation& suballoc = suballocations1st[index1st];
11506 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
11512 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11513 if(suballoc.hAllocation->CanBecomeLost() &&
11514 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11516 ++pAllocationRequest->itemsToMakeLostCount;
11517 pAllocationRequest->sumItemSize += suballoc.size;
// Also make lost the items sharing a granularity page with the new range.
11529 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
11531 while(index1st < suballocations1st.size())
11533 const VmaSuballocation& suballoc = suballocations1st[index1st];
11534 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
11536 if(suballoc.hAllocation != VK_NULL_HANDLE)
11539 if(suballoc.hAllocation->CanBecomeLost() &&
11540 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11542 ++pAllocationRequest->itemsToMakeLostCount;
11543 pAllocationRequest->sumItemSize += suballoc.size;
// Special unsupported case: emptied the whole 1st vector but still no room.
11561 if(index1st == suballocations1st.size() &&
11562 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
11565 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// Fits either before the block end or before the next surviving 1st item.
11570 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
11571 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
// Final granularity check against surviving 1st-vector items above.
11575 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
11577 for(
size_t nextSuballocIndex = index1st;
11578 nextSuballocIndex < suballocations1st.size();
11579 nextSuballocIndex++)
11581 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
11582 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11584 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success at end of 2nd vector.
11598 pAllocationRequest->offset = resultOffset;
11599 pAllocationRequest->sumFreeSize =
11600 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
11602 - pAllocationRequest->sumItemSize;
11603 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Makes lost exactly the allocations a previous CreateAllocationRequest
// counted in itemsToMakeLostCount, walking the 1st vector (then the 2nd in
// ring-buffer mode). Freed items become null entries; the corresponding null
// counters and m_SumFreeSize are updated, then CleanupAfterFree() compacts.
// NOTE(review): return statements, the index-reset on vector switch, and a
// failure branch (when MakeLost fails) are missing from this extraction.
11612 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
11613 uint32_t currentFrameIndex,
11614 uint32_t frameInUseCount,
11615 VmaAllocationRequest* pAllocationRequest)
// Nothing to do if the request required no lost allocations.
11617 if(pAllocationRequest->itemsToMakeLostCount == 0)
11622 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
// Start at the first live item of the 1st vector.
11625 SuballocationVectorType* suballocations = &AccessSuballocations1st();
11626 size_t index = m_1stNullItemsBeginCount;
11627 size_t madeLostCount = 0;
11628 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// Exhausted current vector: in ring-buffer mode continue into the 2nd.
11630 if(index == suballocations->size())
11634 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11636 suballocations = &AccessSuballocations2nd();
11640 VMA_ASSERT(!suballocations->empty());
11642 VmaSuballocation& suballoc = (*suballocations)[index];
11643 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11645 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11646 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
11647 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the lost allocation into a null free item.
11649 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11650 suballoc.hAllocation = VK_NULL_HANDLE;
11651 m_SumFreeSize += suballoc.size;
11652 if(suballocations == &AccessSuballocations1st())
11654 ++m_1stNullItemsMiddleCount;
11658 ++m_2ndNullItemsCount;
// Compact null items and restore invariants.
11670 CleanupAfterFree();
// Makes lost every allocation in this block that CanBecomeLost() and whose
// last-use frame is older than frameInUseCount frames before
// currentFrameIndex. Returns the number of allocations made lost; compacts
// null items via CleanupAfterFree() if anything changed.
11676 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
11678 uint32_t lostAllocationCount = 0;
// Sweep the live region of the 1st vector.
11680 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11681 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11683 VmaSuballocation& suballoc = suballocations1st[i];
11684 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11685 suballoc.hAllocation->CanBecomeLost() &&
11686 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11688 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11689 suballoc.hAllocation = VK_NULL_HANDLE;
11690 ++m_1stNullItemsMiddleCount;
11691 m_SumFreeSize += suballoc.size;
11692 ++lostAllocationCount;
// Sweep the whole 2nd vector.
11696 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11697 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11699 VmaSuballocation& suballoc = suballocations2nd[i];
11700 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11701 suballoc.hAllocation->CanBecomeLost() &&
11702 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11704 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11705 suballoc.hAllocation = VK_NULL_HANDLE;
11706 ++m_2ndNullItemsCount;
11707 m_SumFreeSize += suballoc.size;
11708 ++lostAllocationCount;
// Restore compaction invariants only if something was actually freed.
11712 if(lostAllocationCount)
11714 CleanupAfterFree();
11717 return lostAllocationCount;
// Validates the magic-value guard bytes written VMA_DEBUG_MARGIN before and
// directly after every used suballocation in both vectors. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard (and asserts);
// the success return at the end is outside this extraction.
11720 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
// Check guards around every used item of the 1st vector.
11722 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11723 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11725 const VmaSuballocation& suballoc = suballocations1st[i];
11726 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11728 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11730 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11731 return VK_ERROR_VALIDATION_FAILED_EXT;
11733 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11735 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11736 return VK_ERROR_VALIDATION_FAILED_EXT;
// Check guards around every used item of the 2nd vector.
11741 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11742 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11744 const VmaSuballocation& suballoc = suballocations2nd[i];
11745 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11747 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11749 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11750 return VK_ERROR_VALIDATION_FAILED_EXT;
11752 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11754 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11755 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously created allocation request into the metadata.
// Depending on the request type the new suballocation is appended to the
// 1st vector (ring head / stack top) or the 2nd vector (ring tail or upper
// stack), switching m_2ndVectorMode as needed.
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type)
    {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();

            // New allocation must land after everything already in the 1st vector.
            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());

            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of 2nd vector must fit below the first
            // still-used item of the 1st vector (ring-buffer wrap-around).
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode)
            {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}
11830 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
11832 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation that starts at the given offset.
// Fast paths: first used item of the 1st vector, last item of either vector.
// Otherwise falls back to a binary search in the middle of each vector and
// marks the found item as a null item; CleanupAfterFree() then compacts.
void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty())
    {
        // First allocation: Mark it as next empty at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset)
        {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
    {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaBinaryFindSorted(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc,
            VmaSuballocationOffsetLess());
        if(it != suballocations1st.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
    {
        // Item from the middle of 2nd vector.
        // Ring buffer is sorted ascending by offset, double stack descending.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
        if(it != suballocations2nd.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
11925 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const
11927 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11928 const size_t suballocCount = AccessSuballocations1st().size();
11929 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping after any free: trims null items from the edges of both
// vectors, optionally compacts the 1st vector, and swaps the roles of the
// two vectors when the 1st one drains in ring-buffer mode.
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty())
    {
        // Everything freed - reset to pristine state.
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else
    {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
        {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            VmaVectorRemove(suballocations2nd, 0);
        }

        if(ShouldCompact1st())
        {
            // Slide all non-null items of the 1st vector to the front.
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
            {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
                {
                    ++srcIndex;
                }
                if(dstIndex != srcIndex)
                {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty())
        {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
        {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
                {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}
// Constructs an empty buddy-allocator metadata object.
// NOTE(review): part of the member-initializer list was lost in extraction
// and is reconstructed here — verify against upstream vk_mem_alloc.h.
VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    // All free lists start empty.
    memset(m_FreeList, 0, sizeof(m_FreeList));
}
12051 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
12053 DeleteNode(m_Root);
// Initializes the buddy tree for a block of the given size.
// Only the largest power-of-2 portion of the block is usable; the remainder
// is reported via GetUnusableSize().
void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount: one level per power-of-2 node size down to MIN_NODE_SIZE.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
    {
        ++m_LevelCount;
    }

    // The root node spans the whole usable size and starts free.
    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}
// Validates the whole buddy tree and the per-level free lists against the
// cached counters. Returns false (via VMA_VALIDATE) on the first violation.
bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next)
        {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL)
            {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else
            {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists at unused (deeper) levels are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}
12124 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const
12126 for(uint32_t level = 0; level < m_LevelCount; ++level)
12128 if(m_FreeList[level].front != VMA_NULL)
12130 return LevelToNodeSize(level);
// Fills outInfo with statistics for this block by traversing the tree.
// NOTE(review): the zero-initialization of outInfo fields was lost in
// extraction and is reconstructed here — verify against upstream.
void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    outInfo.blockCount = 1;

    outInfo.allocationCount = outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = outInfo.unusedBytes = 0;

    outInfo.allocationSizeMax = outInfo.unusedRangeSizeMax = 0;
    outInfo.allocationSizeMin = outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.allocationSizeAvg = outInfo.unusedRangeSizeAvg = 0; // Unused.

    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0)
    {
        // Count the non-power-of-2 tail as one unused range, but do not let it
        // influence unusedRangeSizeMin — it is never available for allocations.
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
    }
}
// Accumulates this block's statistics into inoutStats.
// NOTE(review): some accumulation lines were lost in extraction and are
// reconstructed here — verify against upstream.
void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0)
    {
        // The unusable tail counts as one extra unused range, but never as
        // unusedRangeSizeMax - it is not available for allocations.
        ++inoutStats.unusedRangeCount;
    }
}
#if VMA_STATS_STRING_ENABLED

// Writes a detailed JSON map of this block: header stats, every node of the
// tree, and the unusable tail (if any).
void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
// Searches the free lists for a node that can host the requested allocation.
// Scans from targetLevel (best fit) up toward level 0 (larger nodes); the
// chosen level is smuggled to Alloc() through customData.
// Returns true when a suitable free node was found.
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity: inflate alignment and size
    // whenever the allocation might be an OPTIMAL image.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
    {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}
// Lost allocations are not supported in the buddy allocator, so a request is
// satisfiable only if it needs nothing to become lost.
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    /*
    Lost allocations are not supported in buddy allocator.
    */
    return pAllocationRequest->itemsToMakeLostCount == 0;
}
12270 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits an allocation request: locates the chosen free node at the level
// stored in request.customData, splits it repeatedly until targetLevel is
// reached, then converts the final node into an allocation node.
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    // Find the free node with the requested offset at currLevel.
    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down, splitting free nodes.
    while(currLevel < targetLevel)
    {
        // currNode is already first free node at currLevel.
        // Remove it from list of free nodes at this currLevel.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
12355 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
12357 if(node->type == Node::TYPE_SPLIT)
12359 DeleteNode(node->split.leftChild->buddy);
12360 DeleteNode(node->split.leftChild);
12363 vma_delete(GetAllocationCallbacks(), node);
// Recursively validates one node of the buddy tree: parent/buddy links and
// type-specific invariants, accumulating counters into ctx for cross-checking
// by Validate().
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    // Only the root has no buddy, and buddy links must be mutual.
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);

    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev, next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        // Internal fragmentation inside this node also counts as free space.
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
// Returns the deepest level whose node size still fits allocSize.
// Level 0 is the whole usable size; each level halves the node size.
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
12425 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
12428 Node* node = m_Root;
12429 VkDeviceSize nodeOffset = 0;
12430 uint32_t level = 0;
12431 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
12432 while(node->type == Node::TYPE_SPLIT)
12434 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
12435 if(offset < nodeOffset + nextLevelSize)
12437 node = node->split.leftChild;
12441 node = node->split.leftChild->buddy;
12442 nodeOffset += nextLevelSize;
12445 levelNodeSize = nextLevelSize;
12448 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
12449 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
12452 --m_AllocationCount;
12453 m_SumFreeSize += alloc->GetSize();
12455 node->type = Node::TYPE_FREE;
12458 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
12460 RemoveFromFreeList(level, node->buddy);
12461 Node*
const parent = node->parent;
12463 vma_delete(GetAllocationCallbacks(), node->buddy);
12464 vma_delete(GetAllocationCallbacks(), node);
12465 parent->type = Node::TYPE_FREE;
12473 AddToFreeListFront(level, node);
// Recursive helper of CalcAllocationStatInfo: accumulates statistics for one
// node and (for split nodes) its subtree.
// NOTE(review): the accumulation statements were lost in extraction and are
// reconstructed here — verify against upstream.
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += levelNodeSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            ++outInfo.allocationCount;
            outInfo.usedBytes += allocSize;
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);

            // Internal fragmentation within the node counts as an unused range.
            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
12518 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
12520 VMA_ASSERT(node->type == Node::TYPE_FREE);
12523 Node*
const frontNode = m_FreeList[level].front;
12524 if(frontNode == VMA_NULL)
12526 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
12527 node->free.prev = node->free.next = VMA_NULL;
12528 m_FreeList[level].front = m_FreeList[level].back = node;
12532 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
12533 node->free.prev = VMA_NULL;
12534 node->free.next = frontNode;
12535 frontNode->free.prev = node;
12536 m_FreeList[level].front = node;
// Unlinks a node from the doubly-linked free list of the given level,
// fixing up front/back pointers as necessary.
void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
#if VMA_STATS_STRING_ENABLED

// Recursive helper of PrintDetailedMap: emits JSON entries for one node
// and (for split nodes) its subtree.
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            // Internal fragmentation after the allocation is reported as unused.
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
// Constructs an uninitialized device memory block; real setup happens in Init().
// NOTE(review): m_Id and m_MapCount initializers were lost in extraction and
// are reconstructed here — verify against upstream.
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}
// Takes ownership of freshly allocated VkDeviceMemory and creates the
// metadata object matching the requested algorithm (linear, buddy, or the
// default generic allocator).
void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}
// Releases the underlying VkDeviceMemory and the metadata object.
// The block must be empty - a failed assert here means user code leaked
// VmaAllocation objects.
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}
12665 bool VmaDeviceMemoryBlock::Validate()
const
12667 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
12668 (m_pMetadata->GetSize() != 0));
12670 return m_pMetadata->Validate();
// Temporarily maps the block and delegates margin validation to the metadata.
// Returns the mapping error if Map fails, otherwise the metadata's verdict.
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
// Reference-counted persistent mapping of the whole block.
// If already mapped, just bumps the counter and returns the cached pointer;
// otherwise calls vkMapMemory once for the entire VkDeviceMemory.
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    // Protects m_MapCount / m_pMappedData and serializes vkMapMemory calls
    // on the same VkDeviceMemory.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}
// Decrements the mapping reference count; calls vkUnmapMemory only when the
// count drops to zero. Asserts on unbalanced unmap.
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
// Writes the corruption-detection magic value into the debug margins directly
// before and after the allocation. Only meaningful when VMA_DEBUG_MARGIN and
// VMA_DEBUG_DETECT_CORRUPTION are enabled.
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
// Verifies the magic values around an allocation that is being freed.
// Corruption is reported via VMA_ASSERT only; the function itself still
// returns VK_SUCCESS unless mapping fails.
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
// Binds hBuffer to this block's memory at the allocation's offset plus
// allocationLocalOffset (which is relative to the allocation, not the block).
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap...
    // simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}
// Binds hImage to this block's memory at the allocation's offset plus
// allocationLocalOffset (which is relative to the allocation, not the block).
VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap...
    // simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
12833 memset(&outInfo, 0,
sizeof(outInfo));
// Derives the average sizes once all counts/bytes have been accumulated.
// NOTE(review): body was lost in extraction and is reconstructed here —
// verify against upstream vk_mem_alloc.h.
static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
// Constructs a custom pool: forwards the creation parameters to the embedded
// block vector. blockSize == 0 means "use the allocator's preferred size"
// and allows the block vector to pick/shrink sizes dynamically.
// NOTE(review): several constructor arguments were lost in extraction and
// are reconstructed here — verify against upstream.
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK, // algorithm
        createInfo.priority),
    m_Id(0),
    m_Name(VMA_NULL)
{
}
// The pool must already be unlinked from the allocator's pool list.
VmaPool_T::~VmaPool_T()
{
    VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL);
}
// Replaces the pool's debug name. The old name string is always freed;
// pName == VMA_NULL clears the name.
void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}
12901 #if VMA_STATS_STRING_ENABLED
// Constructs a (possibly pool-owned) vector of device memory blocks for one
// memory type. All members are plain copies of the creation parameters.
// NOTE(review): the parameter list head and trailing initializers were lost
// in extraction and are reconstructed here — verify against upstream.
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm,
    float priority) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_Priority(priority),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}
12934 VmaBlockVector::~VmaBlockVector()
12936 for(
size_t i = m_Blocks.size(); i--; )
12938 m_Blocks[i]->Destroy(m_hAllocator);
12939 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size.
// Stops and returns the error of the first failed creation.
VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
12956 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
12958 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12960 const size_t blockCount = m_Blocks.size();
12969 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12971 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12972 VMA_ASSERT(pBlock);
12973 VMA_HEAVY_ASSERT(pBlock->Validate());
12974 pBlock->m_pMetadata->AddPoolStats(*pStats);
12978 bool VmaBlockVector::IsEmpty()
12980 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12981 return m_Blocks.empty();
12984 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const
12986 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12987 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
12988 (VMA_DEBUG_MARGIN > 0) &&
12990 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
12993 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates allocationCount pages, all-or-nothing: on the first failure every
// already-created allocation is rolled back (freed and removed from the
// budget) and pAllocations is zeroed.
// NOTE(review): parts of the signature and the rollback body were lost in
// extraction and are reconstructed here — verify against upstream.
VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        // Round size and alignment up so margins can hold whole magic values.
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        while(allocIndex--)
        {
            VmaAllocation_T* const alloc = pAllocations[allocIndex];
            const VkDeviceSize allocSize = alloc->GetSize();
            Free(alloc);
            m_hAllocator->m_Budget.RemoveAllocation(heapIndex, allocSize);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
// Allocates a single allocation ("page") from this block vector.
// Strategy (as visible here): try existing blocks, then create a new block
// (with progressive size halving on failure), then — if allowed — make other
// allocations "lost" to free up space. Returns VK_SUCCESS or an error code.
// NOTE(review): this text is an extraction-damaged fragment — parameter lines,
// braces and interior statements are elided; confirm against the full source.
13048 VkResult VmaBlockVector::AllocatePage(
13049 uint32_t currentFrameIndex,
13051 VkDeviceSize alignment,
13053 VmaSuballocationType suballocType,
// Query current heap budget to decide whether a new block may be created.
13061 VkDeviceSize freeMemory;
13063 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13065 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
// Custom pools never fall back to a dedicated allocation (presumably — elided
// context; TODO confirm).
13069 const bool canFallbackToDedicated = !IsCustomPool();
13070 const bool canCreateNewBlock =
13072 (m_Blocks.size() < m_MaxBlockCount) &&
13073 (freeMemory >= size || !canFallbackToDedicated);
13080 canMakeOtherLost =
false;
// Upper-address allocations are rejected here (feature not supported in this path).
13084 if(isUpperAddress &&
13087 return VK_ERROR_FEATURE_NOT_PRESENT;
13101 return VK_ERROR_FEATURE_NOT_PRESENT;
// Request (plus debug margins) larger than the preferred block size can never fit.
13105 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
13107 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13115 if(!canMakeOtherLost || canCreateNewBlock)
// 1. Fast path: try the most recently used (last) block first.
13124 if(!m_Blocks.empty())
13126 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
13127 VMA_ASSERT(pCurrBlock);
13128 VkResult res = AllocateFromBlock(
13138 if(res == VK_SUCCESS)
13140 VMA_DEBUG_LOG(
" Returned from last block #%u", pCurrBlock->GetId());
// 2. Forward scan over all existing blocks.
13150 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13152 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13153 VMA_ASSERT(pCurrBlock);
13154 VkResult res = AllocateFromBlock(
13164 if(res == VK_SUCCESS)
13166 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
// 3. Backward scan (presumably used under a different allocation strategy —
// the selecting condition is elided; TODO confirm).
13174 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13176 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13177 VMA_ASSERT(pCurrBlock);
13178 VkResult res = AllocateFromBlock(
13188 if(res == VK_SUCCESS)
13190 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
// 4. No existing block could satisfy the request: create a new block.
13198 if(canCreateNewBlock)
13201 VkDeviceSize newBlockSize = m_PreferredBlockSize;
13202 uint32_t newBlockSizeShift = 0;
13203 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// When block size is not fixed by the user, start with a smaller block if no
// existing block is that large yet (avoids over-allocating for small pools).
13205 if(!m_ExplicitBlockSize)
13208 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
13209 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
13211 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
13212 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
13214 newBlockSize = smallerNewBlockSize;
13215 ++newBlockSizeShift;
13224 size_t newBlockIndex = 0;
// Respect the heap budget unless a dedicated fallback is impossible.
13225 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
13226 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
// On failure, retry with progressively halved block sizes (up to 3 halvings).
13228 if(!m_ExplicitBlockSize)
13230 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
13232 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
13233 if(smallerNewBlockSize >= size)
13235 newBlockSize = smallerNewBlockSize;
13236 ++newBlockSizeShift;
13237 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
13238 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
13247 if(res == VK_SUCCESS)
13249 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
13250 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
13252 res = AllocateFromBlock(
13262 if(res == VK_SUCCESS)
13264 VMA_DEBUG_LOG(
" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
13270 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 5. Last resort: evict ("make lost") other allocations to find room.
// Repeats up to VMA_ALLOCATION_TRY_COUNT times because block state can change
// between finding the best candidate request and committing it.
13277 if(canMakeOtherLost)
13279 uint32_t tryIndex = 0;
13280 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
13282 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
13283 VmaAllocationRequest bestRequest = {};
13284 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan: pick the allocation request with the lowest eviction cost.
13290 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13292 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13293 VMA_ASSERT(pCurrBlock);
13294 VmaAllocationRequest currRequest = {};
13295 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13298 m_BufferImageGranularity,
13307 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13308 if(pBestRequestBlock == VMA_NULL ||
13309 currRequestCost < bestRequestCost)
13311 pBestRequestBlock = pCurrBlock;
13312 bestRequest = currRequest;
13313 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be evicted — cannot do better, stop searching.
13315 if(bestRequestCost == 0)
// Backward scan variant (selecting condition elided — TODO confirm strategy).
13326 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13328 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13329 VMA_ASSERT(pCurrBlock);
13330 VmaAllocationRequest currRequest = {};
13331 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13334 m_BufferImageGranularity,
13343 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13344 if(pBestRequestBlock == VMA_NULL ||
13345 currRequestCost < bestRequestCost ||
13348 pBestRequestBlock = pCurrBlock;
13349 bestRequest = currRequest;
13350 bestRequestCost = currRequestCost;
13352 if(bestRequestCost == 0 ||
13362 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped path: map the winning block before committing (elided
// condition; TODO confirm).
13366 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
13367 if(res != VK_SUCCESS)
// Commit: evict the victims, then allocate and initialize the new allocation.
13373 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
13379 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13380 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
13381 UpdateHasEmptyBlock();
13382 (*pAllocation)->InitBlockAllocation(
13384 bestRequest.offset,
13391 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
13392 VMA_DEBUG_LOG(
" Returned from existing block");
13393 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
13394 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13395 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13397 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Corruption detection writes magic values just outside the allocation.
13399 if(IsCorruptionDetectionEnabled())
13401 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
13402 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// All retries exhausted — too many candidate-vs-commit races.
13417 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
13419 return VK_ERROR_TOO_MANY_OBJECTS;
13423 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation back into its owning block. If the block becomes empty
// it may be destroyed — either immediately (when another empty block already
// exists or the heap budget is exceeded) or deferred by keeping at most one
// empty block alive (the last one) as a cache.
// NOTE(review): extraction-damaged fragment — braces/interior lines elided.
13426 void VmaBlockVector::Free(
13429 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
13431 bool budgetExceeded =
false;
13433 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13435 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
13436 budgetExceeded = heapBudget.
usage >= heapBudget.
budget;
// Scope block for the lock: the actual destruction happens after unlocking.
13441 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13443 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Validate guard magic values before releasing the space.
13445 if(IsCorruptionDetectionEnabled())
13447 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
13448 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Balance the persistent mapping reference taken at allocation time.
13451 if(hAllocation->IsPersistentMap())
13453 pBlock->Unmap(m_hAllocator, 1);
13456 pBlock->m_pMetadata->Free(hAllocation);
13457 VMA_HEAVY_ASSERT(pBlock->Validate());
13459 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
13461 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
13463 if(pBlock->m_pMetadata->IsEmpty())
// Already have one empty block (or over budget) — this one is surplus.
13466 if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
13468 pBlockToDelete = pBlock;
// Block not empty, but a previously-cached empty block can now be reclaimed.
13475 else if(m_HasEmptyBlock && canDeleteBlock)
13477 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
13478 if(pLastBlock->m_pMetadata->IsEmpty())
13480 pBlockToDelete = pLastBlock;
13481 m_Blocks.pop_back();
13485 UpdateHasEmptyBlock();
13486 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is performed outside the mutex (lock scope
// ended above) because it is a slow operation.
13491 if(pBlockToDelete != VMA_NULL)
13493 VMA_DEBUG_LOG(
" Deleted empty block");
13494 pBlockToDelete->Destroy(m_hAllocator);
13495 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block. Iterates backwards and
// stops early once a block at least as large as m_PreferredBlockSize is seen
// (the result cannot usefully grow past that point).
// NOTE(review): fragment — loop braces and the early-exit statement are elided.
13499 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const
13501 VkDeviceSize result = 0;
13502 for(
size_t i = m_Blocks.size(); i--; )
13504 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
13505 if(result >= m_PreferredBlockSize)
// Removes the given block from m_Blocks by linear search; does not destroy it
// (ownership transfer is the caller's responsibility).
13513 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
13515 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13517 if(m_Blocks[blockIndex] == pBlock)
13519 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass over m_Blocks ordering them by ascending free space
// (a single adjacent-swap sweep — full ordering converges over repeated calls,
// keeping each call cheap).
13526 void VmaBlockVector::IncrementallySortBlocks()
13531 for(
size_t i = 1; i < m_Blocks.size(); ++i)
13533 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
13535 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts to place one allocation inside the given block without making any
// other allocation lost. On success: maps (if persistently mapped), records
// the suballocation, updates budget, optionally fills the memory with a debug
// pattern and writes corruption-detection magic values.
// NOTE(review): extraction-damaged fragment — several parameter lines and
// braces are elided; confirm against the full source.
13542 VkResult VmaBlockVector::AllocateFromBlock(
13543 VmaDeviceMemoryBlock* pBlock,
13544 uint32_t currentFrameIndex,
13546 VkDeviceSize alignment,
13549 VmaSuballocationType suballocType,
13558 VmaAllocationRequest currRequest = {};
13559 if(pBlock->m_pMetadata->CreateAllocationRequest(
13562 m_BufferImageGranularity,
// This path must not require evicting anything.
13572 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
13576 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
13577 if(res != VK_SUCCESS)
13583 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13584 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
13585 UpdateHasEmptyBlock();
13586 (*pAllocation)->InitBlockAllocation(
13588 currRequest.offset,
13595 VMA_HEAVY_ASSERT(pBlock->Validate());
13596 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
13597 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13598 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13600 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
13602 if(IsCorruptionDetectionEnabled())
13604 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
13605 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Request could not be created in this block.
13609 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory block of the given size, wraps it in a
// VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally returns its
// index. Chains optional pNext structs (buffer-device-address flags, memory
// priority) onto the VkMemoryAllocateInfo when the corresponding extensions
// are enabled.
// NOTE(review): fragment — error-return on allocation failure and the block
// Init() call are partially elided.
13612 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
13614 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
13615 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
13616 allocInfo.allocationSize = blockSize;
13618 #if VMA_BUFFER_DEVICE_ADDRESS
// Memory backing buffers with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS must be
// allocated with the DEVICE_ADDRESS flag.
13620 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
13621 if(m_hAllocator->m_UseKhrBufferDeviceAddress)
13623 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
13624 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
13628 #if VMA_MEMORY_PRIORITY
13629 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
13630 if(m_hAllocator->m_UseExtMemoryPriority)
13632 priorityInfo.priority = m_Priority;
13633 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
13637 VkDeviceMemory mem = VK_NULL_HANDLE;
13638 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// New block takes ownership of `mem` (initialization call partially elided).
13647 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
13653 allocInfo.allocationSize,
13657 m_Blocks.push_back(pBlock);
13658 if(pNewBlockIndex != VMA_NULL)
13660 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU via memcpy between mapped blocks.
// Steps: (1) mark blocks participating in any move, (2) map them (remembering
// which mappings this function created), (3) for each move invalidate the
// source range (non-coherent memory), memcpy src->dst, rewrite corruption
// magic values, flush the destination range, (4) unmap blocks mapped in (2).
// NOTE(review): extraction-damaged fragment — BlockInfo struct definition,
// several braces, and the memcpy call line itself are partially elided.
13666 void VmaBlockVector::ApplyDefragmentationMovesCpu(
13667 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13668 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
13670 const size_t blockCount = m_Blocks.size();
13671 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
13675 BLOCK_FLAG_USED = 0x00000001,
13676 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
13684 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
13685 blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
13686 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Pass 1: flag every block that is a source or destination of some move.
13689 const size_t moveCount = moves.size();
13690 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13692 const VmaDefragmentationMove& move = moves[moveIndex];
13693 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
13694 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
13697 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: ensure every used block is host-mapped; track mappings we created
// so only those are unmapped at the end.
13700 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13702 BlockInfo& currBlockInfo = blockInfo[blockIndex];
13703 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13704 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
13706 currBlockInfo.pMappedData = pBlock->GetMappedData();
13708 if(currBlockInfo.pMappedData == VMA_NULL)
13710 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
13711 if(pDefragCtx->res == VK_SUCCESS)
13713 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Pass 3: perform the copies (only if all required mappings succeeded).
13720 if(pDefragCtx->res == VK_SUCCESS)
13722 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13723 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13725 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13727 const VmaDefragmentationMove& move = moves[moveIndex];
13729 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
13730 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
13732 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate the source range, aligned down/up to nonCoherentAtomSize and
// clamped to the block size, as required for non-coherent memory.
13737 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
13738 memRange.memory = pSrcBlock->GetDeviceMemory();
13739 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
13740 memRange.size = VMA_MIN(
13741 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
13742 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
13743 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// The actual data copy (memcpy call — leading tokens elided in this fragment).
13748 reinterpret_cast<char*
>(dstBlockInfo.pMappedData) + move.dstOffset,
13749 reinterpret_cast<char*
>(srcBlockInfo.pMappedData) + move.srcOffset,
13750 static_cast<size_t>(move.size));
13752 if(IsCorruptionDetectionEnabled())
13754 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
13755 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush the destination range (same alignment/clamping rules as above).
13761 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
13762 memRange.memory = pDstBlock->GetDeviceMemory();
13763 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
13764 memRange.size = VMA_MIN(
13765 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
13766 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
13767 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Pass 4: undo only the mappings this function created (reverse order).
13774 for(
size_t blockIndex = blockCount; blockIndex--; )
13776 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
13777 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
13779 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13780 pBlock->Unmap(m_hAllocator, 1);
// Records defragmentation moves into a command buffer for GPU execution:
// creates a temporary VkBuffer bound over each participating block's memory,
// then records vkCmdCopyBuffer for every move. Sets pDefragCtx->res to
// VK_NOT_READY when commands were recorded (completion is asynchronous).
// NOTE(review): extraction-damaged fragment — braces and the VkBufferCopy
// field initializers are elided; the source also shows a mis-encoded
// '&region' ('®ion') at original line 13851.
13785 void VmaBlockVector::ApplyDefragmentationMovesGpu(
13786 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13787 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13788 VkCommandBuffer commandBuffer)
13790 const size_t blockCount = m_Blocks.size();
13792 pDefragCtx->blockContexts.resize(blockCount);
13793 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Pass 1: flag blocks participating in any move.
13796 const size_t moveCount = moves.size();
13797 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13799 const VmaDefragmentationMove& move = moves[moveIndex];
13804 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13805 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13809 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: create a transfer buffer spanning each used block and bind it to
// the block's device memory at offset 0.
13813 VkBufferCreateInfo bufCreateInfo;
13814 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
13816 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13818 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
13819 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13820 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
13822 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
13823 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
13824 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
13825 if(pDefragCtx->res == VK_SUCCESS)
13827 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
13828 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Pass 3: record one copy region per move.
13835 if(pDefragCtx->res == VK_SUCCESS)
13837 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13839 const VmaDefragmentationMove& move = moves[moveIndex];
13841 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
13842 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
13844 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
13846 VkBufferCopy region = {
13850 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
13851 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
// NOTE(review): original extracted text shows '®ion' here — mis-encoded
// '&region'; the token above is the reconstructed intent, verify on merge.
// VK_NOT_READY signals that the caller must submit and wait for the commands.
13856 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
13858 pDefragCtx->res = VK_NOT_READY;
// NOTE(review): the enclosing function header is elided by the extraction
// (original lines 13859-13863 missing) — presumably this is the body of
// VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats*); confirm against
// the full source. It destroys empty blocks above m_MinBlockCount (iterating
// backwards so VmaVectorRemove indices stay valid) and credits the freed
// bytes to the defragmentation stats.
13864 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13866 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13867 if(pBlock->m_pMetadata->IsEmpty())
13869 if(m_Blocks.size() > m_MinBlockCount)
13871 if(pDefragmentationStats != VMA_NULL)
13874 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
13877 VmaVectorRemove(m_Blocks, blockIndex);
13878 pBlock->Destroy(m_hAllocator);
13879 vma_delete(m_hAllocator, pBlock);
13887 UpdateHasEmptyBlock();
// Recomputes the cached m_HasEmptyBlock flag: true iff at least one block is
// currently empty. (An early break after the first hit is presumably in the
// elided lines; the visible text only shows the flag being set.)
13890 void VmaBlockVector::UpdateHasEmptyBlock()
13892 m_HasEmptyBlock =
false;
13893 for(
size_t index = 0, count = m_Blocks.size(); index < count; ++index)
13895 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[index];
13896 if(pBlock->m_pMetadata->IsEmpty())
13898 m_HasEmptyBlock =
true;
// Serializes this block vector's state as JSON: pool name / memory type /
// block size and count limits (custom pools) or preferred block size (default
// pools), then a per-block detailed map. Takes a read lock for the duration.
// NOTE(review): fragment — the custom-pool vs default-pool branching and the
// closing braces/#endif are elided.
13904 #if VMA_STATS_STRING_ENABLED
13906 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
13908 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13910 json.BeginObject();
// Custom-pool branch: emit pool-specific configuration.
13914 const char* poolName = m_hParentPool->GetName();
13915 if(poolName != VMA_NULL && poolName[0] !=
'\0')
13917 json.WriteString(
"Name");
13918 json.WriteString(poolName);
13921 json.WriteString(
"MemoryTypeIndex");
13922 json.WriteNumber(m_MemoryTypeIndex);
13924 json.WriteString(
"BlockSize");
13925 json.WriteNumber(m_PreferredBlockSize);
13927 json.WriteString(
"BlockCount");
13928 json.BeginObject(
true);
13929 if(m_MinBlockCount > 0)
13931 json.WriteString(
"Min");
13932 json.WriteNumber((uint64_t)m_MinBlockCount);
13934 if(m_MaxBlockCount < SIZE_MAX)
13936 json.WriteString(
"Max");
13937 json.WriteNumber((uint64_t)m_MaxBlockCount);
13939 json.WriteString(
"Cur");
13940 json.WriteNumber((uint64_t)m_Blocks.size());
13943 if(m_FrameInUseCount > 0)
13945 json.WriteString(
"FrameInUseCount");
13946 json.WriteNumber(m_FrameInUseCount);
13949 if(m_Algorithm != 0)
13951 json.WriteString(
"Algorithm");
13952 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default-pool branch (the `else` itself is elided in this fragment).
13957 json.WriteString(
"PreferredBlockSize");
13958 json.WriteNumber(m_PreferredBlockSize);
// Per-block detail, keyed by block id.
13961 json.WriteString(
"Blocks");
13962 json.BeginObject();
13963 for(
size_t i = 0; i < m_Blocks.size(); ++i)
13965 json.BeginString();
13966 json.ContinueString(m_Blocks[i]->GetId());
13969 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation pass over this block vector. Chooses CPU vs GPU
// path from memory properties and remaining move budgets, locks the vector
// (or fails fast for incremental defragmentation when TryLockWrite fails),
// computes the moves via the context's algorithm, updates the caller's
// remaining budgets, then applies the moves on the chosen path.
// NOTE(review): fragment — the canDefragmentOnCpu isHostVisible term, several
// braces and else-branches are elided.
13978 void VmaBlockVector::Defragment(
13979 class VmaBlockVectorDefragmentationContext* pCtx,
13981 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
13982 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
13983 VkCommandBuffer commandBuffer)
13985 pCtx->res = VK_SUCCESS;
13987 const VkMemoryPropertyFlags memPropFlags =
13988 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
13989 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
13991 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
// GPU path requires corruption detection off (magic values would be stale)
// and the memory type enabled in the GPU-defragmentation mask.
13993 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
13994 !IsCorruptionDetectionEnabled() &&
13995 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
13998 if(canDefragmentOnCpu || canDefragmentOnGpu)
14000 bool defragmentOnGpu;
// Only one path possible — take it; otherwise prefer GPU for device-local
// memory or on integrated GPUs.
14002 if(canDefragmentOnGpu != canDefragmentOnCpu)
14004 defragmentOnGpu = canDefragmentOnGpu;
14009 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
14010 m_hAllocator->IsIntegratedGpu();
// GPU copies of overlapping ranges are not safe, CPU memmove-style ones are.
14013 bool overlappingMoveSupported = !defragmentOnGpu;
14015 if(m_hAllocator->m_UseMutex)
// Incremental mode (presumably — the selecting flag test is elided): do not
// block; report INITIALIZATION_FAILED if the lock is contended.
14019 if(!m_Mutex.TryLockWrite())
14021 pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
14027 m_Mutex.LockWrite();
14028 pCtx->mutexLocked =
true;
14032 pCtx->Begin(overlappingMoveSupported, flags);
14036 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
14037 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
14038 pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);
// Deduct consumed budget from the caller's by-reference limits.
14041 if(pStats != VMA_NULL)
14043 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
14044 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
14047 VMA_ASSERT(bytesMoved <= maxBytesToMove);
14048 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
14049 if(defragmentOnGpu)
14051 maxGpuBytesToMove -= bytesMoved;
14052 maxGpuAllocationsToMove -= allocationsMoved;
14056 maxCpuBytesToMove -= bytesMoved;
14057 maxCpuAllocationsToMove -= allocationsMoved;
// Incremental-mode early out (elided condition): unlock and report NOT_READY
// while moves remain to be processed.
14063 if(m_hAllocator->m_UseMutex)
14064 m_Mutex.UnlockWrite();
14066 if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
14067 pCtx->res = VK_NOT_READY;
14072 if(pCtx->res >= VK_SUCCESS)
14074 if(defragmentOnGpu)
14076 ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
14080 ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
// Finishes a defragmentation pass: re-acquires the write lock if needed,
// destroys the temporary buffers created for the GPU path, frees blocks that
// became empty, and releases the lock taken by Defragment().
// NOTE(review): fragment — the flags check guarding the re-lock and several
// braces are elided.
14086 void VmaBlockVector::DefragmentationEnd(
14087 class VmaBlockVectorDefragmentationContext* pCtx,
14093 VMA_ASSERT(pCtx->mutexLocked ==
false);
14097 m_Mutex.LockWrite();
14098 pCtx->mutexLocked =
true;
14102 if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
// Destroy the per-block staging buffers created by ApplyDefragmentationMovesGpu.
14105 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
14107 VmaBlockDefragmentationContext &blockCtx = pCtx->blockContexts[blockIndex];
14108 if(blockCtx.hBuffer)
14110 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
14114 if(pCtx->res >= VK_SUCCESS)
14116 FreeEmptyBlocks(pStats);
14120 if(pCtx->mutexLocked)
14122 VMA_ASSERT(m_hAllocator->m_UseMutex);
14123 m_Mutex.UnlockWrite();
// Incremental defragmentation: hands the caller up to maxMoves pending moves
// (as pass-move-info records with destination memory handle and offset),
// advances the processed counter, and returns how many were emitted.
// NOTE(review): fragment — the pMove output-array advance and the return
// statement are elided.
14127 uint32_t VmaBlockVector::ProcessDefragmentations(
14128 class VmaBlockVectorDefragmentationContext *pCtx,
14131 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14133 const uint32_t moveCount = VMA_MIN(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);
14135 for(uint32_t i = 0; i < moveCount; ++ i)
14137 VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];
14140 pMove->
memory = move.pDstBlock->GetDeviceMemory();
14141 pMove->
offset = move.dstOffset;
14146 pCtx->defragmentationMovesProcessed += moveCount;
// Commits all processed-but-uncommitted moves: frees each allocation's space
// in the source block's metadata and rebinds the allocation to its new block
// and offset, then frees any blocks that became empty.
14151 void VmaBlockVector::CommitDefragmentations(
14152 class VmaBlockVectorDefragmentationContext *pCtx,
14155 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14157 for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
14159 const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];
14161 move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
14162 move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
14165 pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
14166 FreeEmptyBlocks(pStats);
// Sums the allocation counts of all blocks. (The declaration of `result` and
// the return statement are elided in this fragment.)
14169 size_t VmaBlockVector::CalcAllocationCount()
const
14172 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14174 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Returns whether any block might contain adjacent buffer/image suballocations
// closer than bufferImageGranularity. Trivially false when granularity is 1.
// The per-block check threads lastSuballocType across blocks so a conflict
// spanning a block boundary scan is tracked consistently.
// NOTE(review): fragment — the early `return false` bodies are elided. The
// cast to VmaBlockMetadata_Generic is justified by the m_Algorithm == 0
// assert (generic algorithm only).
14179 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const
14181 if(m_BufferImageGranularity == 1)
14185 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
14186 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
14188 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
14189 VMA_ASSERT(m_Algorithm == 0);
14190 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
14191 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks allocations in all blocks as "lost" according to the frame-in-use
// policy and optionally reports how many were invalidated.
14199 void VmaBlockVector::MakePoolAllocationsLost(
14200 uint32_t currentFrameIndex,
14201 size_t* pLostAllocationCount)
14203 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14204 size_t lostAllocationCount = 0;
14205 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14207 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14208 VMA_ASSERT(pBlock);
14209 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
14211 if(pLostAllocationCount != VMA_NULL)
14213 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection magic values in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is disabled;
// otherwise propagates the first per-block failure (the success return and
// the error propagation body are elided in this fragment).
14217 VkResult VmaBlockVector::CheckCorruption()
14219 if(!IsCorruptionDetectionEnabled())
14221 return VK_ERROR_FEATURE_NOT_PRESENT;
14224 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
14225 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14227 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14228 VMA_ASSERT(pBlock);
14229 VkResult res = pBlock->CheckCorruption(m_hAllocator);
14230 if(res != VK_SUCCESS)
// Accumulates each block's statistics into the aggregate VmaStats buckets:
// total, per-memory-type, and per-heap. Holds a read lock during iteration.
// (The declaration of `allocationStatInfo` is in an elided line.)
14238 void VmaBlockVector::AddStats(
VmaStats* pStats)
14240 const uint32_t memTypeIndex = m_MemoryTypeIndex;
14241 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
14243 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
14245 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14247 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14248 VMA_ASSERT(pBlock);
14249 VMA_HEAVY_ASSERT(pBlock->Validate());
14251 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
14252 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14253 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14254 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: snapshots all blocks of the target block vector into per-block
// BlockInfo records (remembering each block's original index), then sorts the
// records by block pointer so AddAllocation can binary-search them.
14261 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
14263 VmaBlockVector* pBlockVector,
14264 uint32_t currentFrameIndex,
14265 bool overlappingMoveSupported) :
14266 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14267 m_AllocationCount(0),
14268 m_AllAllocations(false),
14270 m_AllocationsMoved(0),
14271 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
14274 const size_t blockCount = m_pBlockVector->m_Blocks.size();
14275 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14277 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
14278 pBlockInfo->m_OriginalBlockIndex = blockIndex;
14279 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
14280 m_Blocks.push_back(pBlockInfo);
// Sorted by block pointer for the binary search in AddAllocation.
14284 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Destructor: releases the heap-allocated BlockInfo records owned by m_Blocks.
14287 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
14289 for(
size_t i = m_Blocks.size(); i--; )
14291 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a defragmentation candidate: skips lost
// allocations, binary-searches the owning block's BlockInfo (the vector is
// sorted by block pointer — see the constructor's VMA_SORT), and appends the
// allocation with its optional pChanged output flag.
14295 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
14298 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
14300 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
14301 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
14302 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
14304 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
14305 (*it)->m_Allocations.push_back(allocInfo);
14312 ++m_AllocationCount;
// One round of the generic defragmentation algorithm: walks source
// allocations from the last block backwards and tries to re-place each into
// an earlier block (or earlier offset), recording the moves, until the
// byte/allocation budgets are hit or nothing movable remains.
// NOTE(review): extraction-damaged fragment — the outer `while` loop, many
// braces, several CreateAllocationRequest arguments and loop-advance
// statements are elided.
14316 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
14317 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14318 VkDeviceSize maxBytesToMove,
14319 uint32_t maxAllocationsToMove,
14320 bool freeOldAllocations)
14322 if(m_Blocks.empty())
14335 size_t srcBlockMinIndex = 0;
// Cursor over source allocations: starts at the last allocation of the last
// block and moves backwards (SIZE_MAX = "start from the end of this block").
14348 size_t srcBlockIndex = m_Blocks.size() - 1;
14349 size_t srcAllocIndex = SIZE_MAX;
14355 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
14357 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// Reached the lowest block allowed as a source — round is finished.
14360 if(srcBlockIndex == srcBlockMinIndex)
14367 srcAllocIndex = SIZE_MAX;
14372 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
14376 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
14377 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
14379 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
14380 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
14381 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
14382 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destinations from the first block up to and including the source block.
14385 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
14387 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
14388 VmaAllocationRequest dstAllocRequest;
14389 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
14390 m_CurrentFrameIndex,
14391 m_pBlockVector->GetFrameInUseCount(),
14392 m_pBlockVector->GetBufferImageGranularity(),
14399 &dstAllocRequest) &&
// MoveMakesSense rejects moves that don't reduce fragmentation (same block,
// higher offset, or a later block).
14401 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
14403 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop the round when either budget would be exceeded by this move.
14406 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
14407 (m_BytesMoved + size > maxBytesToMove))
14412 VmaDefragmentationMove move = {};
14413 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
14414 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
14415 move.srcOffset = srcOffset;
14416 move.dstOffset = dstAllocRequest.offset;
14418 move.hAllocation = allocInfo.m_hAllocation;
14419 move.pSrcBlock = pSrcBlockInfo->m_pBlock;
14420 move.pDstBlock = pDstBlockInfo->m_pBlock;
14422 moves.push_back(move);
14424 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
14428 allocInfo.m_hAllocation);
// Non-incremental mode frees the source immediately; incremental mode defers
// to CommitDefragmentations.
14430 if(freeOldAllocations)
14432 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
14433 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
14436 if(allocInfo.m_pChanged != VMA_NULL)
14438 *allocInfo.m_pChanged = VK_TRUE;
14441 ++m_AllocationsMoved;
14442 m_BytesMoved += size;
14444 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the backward cursor (decrement / move to previous block).
14452 if(srcAllocIndex > 0)
14458 if(srcBlockIndex > 0)
14461 srcAllocIndex = SIZE_MAX;
// Counts blocks that contain at least one non-movable allocation. (The
// accumulator declaration, increment, and return are in elided lines.)
14471 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const
14474 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14476 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm: prepares per-block allocation lists
// (either those registered via AddAllocation, or every allocation when
// m_AllAllocations is set), sorts blocks by destination preference, then
// runs up to `roundCount` DefragmentRound passes within the given budgets.
// NOTE(review): fragment — the early-return body, iterator increment, sort
// of allocations, and the DefragmentRound invocation line are elided.
14484 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
14485 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14486 VkDeviceSize maxBytesToMove,
14487 uint32_t maxAllocationsToMove,
14490 if(!m_AllAllocations && m_AllocationCount == 0)
14495 const size_t blockCount = m_Blocks.size();
14496 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14498 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
// "Defragment everything" mode: harvest all live suballocations from the
// block metadata instead of relying on AddAllocation registrations.
14500 if(m_AllAllocations)
14502 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
14503 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
14504 it != pMetadata->m_Suballocations.end();
14507 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
14509 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
14510 pBlockInfo->m_Allocations.push_back(allocInfo);
14515 pBlockInfo->CalcHasNonMovableAllocations();
// Sources are consumed from the back, so sort descending by offset.
14519 pBlockInfo->SortAllocationsByOffsetDescending();
14525 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
14528 const uint32_t roundCount = 2;
14531 VkResult result = VK_SUCCESS;
14532 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
// A move is worthwhile only if it compacts data toward the front: destination
// in an earlier block, or the same block at a lower offset. (The return
// true/false bodies are elided in this fragment.)
14540 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
14541 size_t dstBlockIndex, VkDeviceSize dstOffset,
14542 size_t srcBlockIndex, VkDeviceSize srcOffset)
14544 if(dstBlockIndex < srcBlockIndex)
14548 if(dstBlockIndex > srcBlockIndex)
14552 if(dstOffset < srcOffset)
// Constructor of the fast (single-pass, generic-metadata-only) algorithm.
// Asserts VMA_DEBUG_MARGIN == 0 because this algorithm's compaction assumes
// suballocations can be packed without debug margins.
14562 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
14564 VmaBlockVector* pBlockVector,
14565 uint32_t currentFrameIndex,
14566 bool overlappingMoveSupported) :
14567 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14568 m_OverlappingMoveSupported(overlappingMoveSupported),
14569 m_AllocationCount(0),
14570 m_AllAllocations(false),
14572 m_AllocationsMoved(0),
14573 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
14575 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Trivial destructor — m_BlockInfos cleans itself up.
14579 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Performs the "fast" defragmentation: walks every suballocation in block
// order and repacks it into the lowest possible block/offset, appending one
// VmaDefragmentationMove per relocation to `moves`. Metadata is stripped
// first (PreprocessMetadata) and fully rebuilt afterwards
// (PostprocessMetadata), within the caller-imposed byte/count budgets.
14583 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
14584 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14585 VkDeviceSize maxBytesToMove,
14586 uint32_t maxAllocationsToMove,
// Sanity: either AddAll() was used or every allocation was added explicitly.
14589 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
14591 const size_t blockCount = m_pBlockVector->GetBlockCount();
// Nothing to do, or the caller forbade any movement.
14592 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
14597 PreprocessMetadata();
// Sort block indices so blocks with the least free space (i.e. the fullest)
// come first — those become the move destinations.
14601 m_BlockInfos.resize(blockCount);
14602 for(
size_t i = 0; i < blockCount; ++i)
14604 m_BlockInfos[i].origBlockIndex = i;
14607 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
14608 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
14609 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Remembers free gaps left behind in already-processed destination blocks so
// later (smaller) allocations can still be packed into them.
14614 FreeSpaceDatabase freeSpaceDb;
// Destination cursor: current block (in sorted order) + running offset.
14616 size_t dstBlockInfoIndex = 0;
14617 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14618 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14619 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14620 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
14621 VkDeviceSize dstOffset = 0;
// Iterate source blocks in the same sorted order, relocating each
// suballocation in turn.
14624 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
14626 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
14627 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
14628 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
14629 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
14630 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
14632 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
14633 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
14634 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Stop once the caller's move budget would be exceeded.
14635 if(m_AllocationsMoved == maxAllocationsToMove ||
14636 m_BytesMoved + srcAllocSize > maxBytesToMove)
14641 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
14643 VmaDefragmentationMove move = {};
// Case 1: the allocation fits into a previously registered free gap.
14645 size_t freeSpaceInfoIndex;
14646 VkDeviceSize dstAllocOffset;
14647 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
14648 freeSpaceInfoIndex, dstAllocOffset))
14650 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
14651 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
14652 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Gap is in the same block: only the offset changes (always backwards).
14655 if(freeSpaceInfoIndex == srcBlockInfoIndex)
14657 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14661 VmaSuballocation suballoc = *srcSuballocIt;
14662 suballoc.offset = dstAllocOffset;
14663 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
14664 m_BytesMoved += srcAllocSize;
14665 ++m_AllocationsMoved;
// Detach from the source list before re-inserting at the new position.
14667 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14669 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14670 srcSuballocIt = nextSuballocIt;
14672 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14674 move.srcBlockIndex = srcOrigBlockIndex;
14675 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14676 move.srcOffset = srcAllocOffset;
14677 move.dstOffset = dstAllocOffset;
14678 move.size = srcAllocSize;
14680 moves.push_back(move);
// Gap is in an earlier block: rebind the allocation to that block.
14687 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
14689 VmaSuballocation suballoc = *srcSuballocIt;
14690 suballoc.offset = dstAllocOffset;
14691 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
14692 m_BytesMoved += srcAllocSize;
14693 ++m_AllocationsMoved;
14695 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14697 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14698 srcSuballocIt = nextSuballocIt;
14700 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14702 move.srcBlockIndex = srcOrigBlockIndex;
14703 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14704 move.srcOffset = srcAllocOffset;
14705 move.dstOffset = dstAllocOffset;
14706 move.size = srcAllocSize;
14708 moves.push_back(move);
// Case 2: append at the destination cursor, honoring alignment.
14713 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance to the next destination block while the allocation does not fit;
// the unused tail of each skipped block is remembered in freeSpaceDb.
14716 while(dstBlockInfoIndex < srcBlockInfoIndex &&
14717 dstAllocOffset + srcAllocSize > dstBlockSize)
14720 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
14722 ++dstBlockInfoIndex;
14723 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14724 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14725 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14726 dstBlockSize = pDstMetadata->GetSize();
14728 dstAllocOffset = 0;
// The destination cursor has reached the source block itself.
14732 if(dstBlockInfoIndex == srcBlockInfoIndex)
14734 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14736 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
14738 bool skipOver = overlap;
14739 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: an overlapping same-block move is only worth doing when the
// allocation travels at least 1/64 of its own size.
14743 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
// Leave the allocation in place; the gap before it becomes reusable.
14748 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
14750 dstOffset = srcAllocOffset + srcAllocSize;
// Move within the same block (possibly with overlapping copy).
14756 srcSuballocIt->offset = dstAllocOffset;
14757 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
14758 dstOffset = dstAllocOffset + srcAllocSize;
14759 m_BytesMoved += srcAllocSize;
14760 ++m_AllocationsMoved;
14763 move.srcBlockIndex = srcOrigBlockIndex;
14764 move.dstBlockIndex = dstOrigBlockIndex;
14765 move.srcOffset = srcAllocOffset;
14766 move.dstOffset = dstAllocOffset;
14767 move.size = srcAllocSize;
14769 moves.push_back(move);
// Move to a strictly earlier destination block.
14777 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
14778 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
14780 VmaSuballocation suballoc = *srcSuballocIt;
14781 suballoc.offset = dstAllocOffset;
14782 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
14783 dstOffset = dstAllocOffset + srcAllocSize;
14784 m_BytesMoved += srcAllocSize;
14785 ++m_AllocationsMoved;
14787 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14789 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14790 srcSuballocIt = nextSuballocIt;
// The destination cursor only grows, so push_back keeps the destination
// suballocation list sorted by offset.
14792 pDstMetadata->m_Suballocations.push_back(suballoc);
14794 move.srcBlockIndex = srcOrigBlockIndex;
14795 move.dstBlockIndex = dstOrigBlockIndex;
14796 move.srcOffset = srcAllocOffset;
14797 move.dstOffset = dstAllocOffset;
14798 move.size = srcAllocSize;
14800 moves.push_back(move);
14806 m_BlockInfos.clear();
// Rebuild free lists / statistics invalidated by the raw list surgery above.
14808 PostprocessMetadata();
// Strips all FREE suballocations and resets free-space statistics in every
// block's metadata, leaving only real allocations. Defragment() then treats
// each block as fully free space it can repack into; PostprocessMetadata()
// restores a consistent state afterwards.
14813 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
14815 const size_t blockCount = m_pBlockVector->GetBlockCount();
14816 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14818 VmaBlockMetadata_Generic*
const pMetadata =
14819 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
// Temporarily pretend the whole block is free; counters are rebuilt later.
14820 pMetadata->m_FreeCount = 0;
14821 pMetadata->m_SumFreeSize = pMetadata->GetSize();
14822 pMetadata->m_FreeSuballocationsBySize.clear();
14823 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14824 it != pMetadata->m_Suballocations.end(); )
// Remove FREE entries from the list; keep only real allocations.
14826 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
14828 VmaSuballocationList::iterator nextIt = it;
14830 pMetadata->m_Suballocations.erase(it);
// Rebuilds per-block metadata (FREE suballocations, free counters, the
// size-sorted free list) after Defragment() manipulated the raw
// suballocation lists directly.
14841 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
14843 const size_t blockCount = m_pBlockVector->GetBlockCount();
14844 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14846 VmaBlockMetadata_Generic*
const pMetadata =
14847 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14848 const VkDeviceSize blockSize = pMetadata->GetSize();
// Block emptied completely: it becomes one big FREE suballocation.
14851 if(pMetadata->m_Suballocations.empty())
14853 pMetadata->m_FreeCount = 1;
14855 VmaSuballocation suballoc = {
14859 VMA_SUBALLOCATION_TYPE_FREE };
14860 pMetadata->m_Suballocations.push_back(suballoc);
14861 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Otherwise walk the offset-sorted allocations and materialize a FREE
// suballocation for every gap between them.
14866 VkDeviceSize offset = 0;
14867 VmaSuballocationList::iterator it;
14868 for(it = pMetadata->m_Suballocations.begin();
14869 it != pMetadata->m_Suballocations.end();
// Invariants: only real allocations remain, at or past the running offset.
14872 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
14873 VMA_ASSERT(it->offset >= offset);
// Gap before this allocation → insert a FREE suballocation to cover it.
14876 if(it->offset > offset)
14878 ++pMetadata->m_FreeCount;
14879 const VkDeviceSize freeSize = it->offset - offset;
14880 VmaSuballocation suballoc = {
14884 VMA_SUBALLOCATION_TYPE_FREE };
14885 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// Only gaps at/above the registration threshold enter the by-size index.
14886 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14888 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
14892 pMetadata->m_SumFreeSize -= it->size;
14893 offset = it->offset + it->size;
// Trailing free space after the last allocation.
14897 if(offset < blockSize)
14899 ++pMetadata->m_FreeCount;
14900 const VkDeviceSize freeSize = blockSize - offset;
14901 VmaSuballocation suballoc = {
14905 VMA_SUBALLOCATION_TYPE_FREE };
14906 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
14907 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): this path uses '>' while the preceding-gap path above uses
// '>=' against the same threshold — looks inconsistent; confirm intent.
14908 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14910 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
// Restore the ordering invariant of the by-size free list.
14915 pMetadata->m_FreeSuballocationsBySize.begin(),
14916 pMetadata->m_FreeSuballocationsBySize.end(),
14917 VmaSuballocationItemSizeLess());
14920 VMA_HEAVY_ASSERT(pMetadata->Validate());
14924 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
14927 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14928 while(it != pMetadata->m_Suballocations.end())
14930 if(it->offset < suballoc.offset)
14935 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context: holds the chosen algorithm, the
// queued allocations, and the incremental-defragmentation progress counters.
14941 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
14944 VmaBlockVector* pBlockVector,
14945 uint32_t currFrameIndex) :
14947 mutexLocked(false),
// Containers are routed through the allocator's allocation callbacks.
14948 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
14949 defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
14950 defragmentationMovesProcessed(0),
14951 defragmentationMovesCommitted(0),
14952 hasDefragmentationPlan(0),
14953 m_hAllocator(hAllocator),
// Null for the default pools; non-null when defragmenting a custom VmaPool.
14954 m_hCustomPool(hCustomPool),
14955 m_pBlockVector(pBlockVector),
14956 m_CurrFrameIndex(currFrameIndex),
// The algorithm object is created lazily in Begin().
14957 m_pAlgorithm(VMA_NULL),
14958 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
14959 m_AllAllocations(false)
// Destroys the algorithm object created in Begin() (vma_delete handles null).
14963 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
14965 vma_delete(m_hAllocator, m_pAlgorithm);
14968 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
14970 AllocInfo info = { hAlloc, pChanged };
14971 m_Allocations.push_back(info);
// Selects and instantiates the defragmentation algorithm for this block
// vector, then feeds it either all allocations or only the queued ones.
14974 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported,
VmaDefragmentationFlags flags)
14976 const bool allAllocations = m_AllAllocations ||
14977 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
// The fast algorithm is only valid when there are no debug margins and no
// possible buffer/image granularity conflicts (among other visible checks).
14990 if(VMA_DEBUG_MARGIN == 0 &&
14992 !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
14995 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
14996 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
// Fallback: the generic (slower but unrestricted) algorithm.
15000 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
15001 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
15006 m_pAlgorithm->AddAll();
15010 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
15012 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context: owns one sub-context per default memory
// type pool (fixed array, zeroed here) plus a dynamic list for custom pools.
15020 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
15022 uint32_t currFrameIndex,
15025 m_hAllocator(hAllocator),
15026 m_CurrFrameIndex(currFrameIndex),
15029 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
// Default-pool contexts start out null; they are created on demand.
15031 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Finalizes and destroys every sub-context: notifies each block vector via
// DefragmentationEnd (which unlocks/cleans up) before deleting the context.
15034 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
15036 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15038 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
15039 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
15040 vma_delete(m_hAllocator, pBlockVectorCtx);
// Default-pool contexts are sparse — only non-null entries were created.
15042 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
15044 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
15045 if(pBlockVectorCtx)
15047 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
15048 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation: reuses an existing
// sub-context for the pool if present, otherwise creates one, then marks it
// to defragment all of the pool's allocations.
15053 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
const VmaPool* pPools)
15055 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15057 VmaPool pool = pPools[poolIndex];
// Pools with a custom algorithm (linear/buddy) are not defragmentable here.
15060 if(pool->m_BlockVector.GetAlgorithm() == 0)
15062 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Look for an existing context for this pool.
15064 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15066 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
15068 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
15073 if(!pBlockVectorDefragCtx)
15075 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15078 &pool->m_BlockVector,
15080 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
15083 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation, routing each one to
// the sub-context of its owning custom pool or default memory-type pool
// (creating the sub-context on first use).
15088 void VmaDefragmentationContext_T::AddAllocations(
15089 uint32_t allocationCount,
15091 VkBool32* pAllocationsChanged)
15094 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
15097 VMA_ASSERT(hAlloc);
// Only non-lost block allocations can be moved; dedicated ones are skipped.
15099 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
15101 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
15103 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
15105 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation belongs to a custom pool.
15107 if(hAllocPool != VK_NULL_HANDLE)
// Pools with a custom algorithm are not defragmentable.
15110 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
15112 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15114 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
15116 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
15120 if(!pBlockVectorDefragCtx)
15122 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15125 &hAllocPool->m_BlockVector,
15127 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to one of the default per-memory-type pools.
15134 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
15135 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
15136 if(!pBlockVectorDefragCtx)
15138 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15141 m_hAllocator->m_pBlockVectors[memTypeIndex],
15143 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
15147 if(pBlockVectorDefragCtx)
// Hand over the caller's optional per-allocation "changed" out-flag.
15149 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
15150 &pAllocationsChanged[allocIndex] : VMA_NULL;
15151 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Runs defragmentation over all registered sub-contexts with separate CPU /
// GPU budgets. Without a command buffer the GPU budget is forced to zero, so
// only host-visible moves happen. Stops early if any sub-context fails.
15157 VkResult VmaDefragmentationContext_T::Defragment(
15158 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
15159 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// Budgets are stashed for the incremental (pass-based) API as well.
15171 m_MaxCpuBytesToMove = maxCpuBytesToMove;
15172 m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
15174 m_MaxGpuBytesToMove = maxGpuBytesToMove;
15175 m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;
// All budgets zero → nothing can be done in this call.
15177 if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
15178 m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
15181 return VK_NOT_READY;
// No command buffer → GPU-side copies impossible; disable that budget.
15184 if(commandBuffer == VK_NULL_HANDLE)
15186 maxGpuBytesToMove = 0;
15187 maxGpuAllocationsToMove = 0;
15190 VkResult res = VK_SUCCESS;
// First process default per-memory-type pools...
15193 for(uint32_t memTypeIndex = 0;
15194 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
15197 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15198 if(pBlockVectorCtx)
15200 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15201 pBlockVectorCtx->GetBlockVector()->Defragment(
15204 maxCpuBytesToMove, maxCpuAllocationsToMove,
15205 maxGpuBytesToMove, maxGpuAllocationsToMove,
15207 if(pBlockVectorCtx->res != VK_SUCCESS)
15209 res = pBlockVectorCtx->res;
// ...then custom pools, in registration order.
15215 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15216 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
15219 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15220 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15221 pBlockVectorCtx->GetBlockVector()->Defragment(
15224 maxCpuBytesToMove, maxCpuAllocationsToMove,
15225 maxGpuBytesToMove, maxGpuAllocationsToMove,
15227 if(pBlockVectorCtx->res != VK_SUCCESS)
15229 res = pBlockVectorCtx->res;
// Incremental-defragmentation pass body (the enclosing function's signature —
// presumably DefragmentPassBegin — lies above this excerpt; confirm in the
// full file). For each sub-context: build the defragmentation plan once
// (lazily, flagged by hasDefragmentationPlan), then drain up to `movesLeft`
// planned moves into the caller's array via ProcessDefragmentations.
15242 for(uint32_t memTypeIndex = 0;
15243 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15246 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15247 if(pBlockVectorCtx)
15249 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
// Plan not built yet for this block vector → run the planning step now.
15251 if(!pBlockVectorCtx->hasDefragmentationPlan)
15253 pBlockVectorCtx->GetBlockVector()->Defragment(
15256 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15257 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
15260 if(pBlockVectorCtx->res < VK_SUCCESS)
15263 pBlockVectorCtx->hasDefragmentationPlan =
true;
// Emit planned moves into the caller-provided window.
15266 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15268 pCurrentMove, movesLeft);
15270 movesLeft -= processed;
15271 pCurrentMove += processed;
// Same procedure for custom-pool sub-contexts.
15276 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15277 customCtxIndex < customCtxCount;
15280 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15281 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15283 if(!pBlockVectorCtx->hasDefragmentationPlan)
15285 pBlockVectorCtx->GetBlockVector()->Defragment(
15288 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15289 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
15292 if(pBlockVectorCtx->res < VK_SUCCESS)
15295 pBlockVectorCtx->hasDefragmentationPlan =
true;
15298 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15300 pCurrentMove, movesLeft);
15302 movesLeft -= processed;
15303 pCurrentMove += processed;
// Ends an incremental defragmentation pass: commits the moves processed so
// far in every sub-context. Returns VK_NOT_READY if any sub-context has no
// plan yet or still has uncommitted moves (i.e. more passes are needed).
15310 VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
15312 VkResult res = VK_SUCCESS;
// Default per-memory-type pools first.
15315 for(uint32_t memTypeIndex = 0;
15316 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15319 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15320 if(pBlockVectorCtx)
15322 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15324 if(!pBlockVectorCtx->hasDefragmentationPlan)
15326 res = VK_NOT_READY;
15330 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15331 pBlockVectorCtx, m_pStats);
// Still uncommitted moves → caller must run another pass.
15333 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15334 res = VK_NOT_READY;
// Then custom-pool sub-contexts.
15339 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15340 customCtxIndex < customCtxCount;
15343 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15344 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15346 if(!pBlockVectorCtx->hasDefragmentationPlan)
15348 res = VK_NOT_READY;
15352 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15353 pBlockVectorCtx, m_pStats);
15355 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15356 res = VK_NOT_READY;
15365 #if VMA_RECORDING_ENABLED
// Recorder constructor: captures the start timestamp so every recorded call
// can log a time relative to recorder creation.
15367 VmaRecorder::VmaRecorder() :
15371 m_RecordingStartTime(std::chrono::high_resolution_clock::now())
// Recorder initialization body (enclosing signature — presumably
// VmaRecorder::Init(settings, useMutex) — lies above this excerpt; confirm in
// the full file): stores settings, opens the CSV recording file in binary
// mode, and writes the file-type header plus recording format version.
15377 m_UseMutex = useMutex;
15378 m_Flags = settings.
flags;
15380 #if defined(_WIN32)
// Windows: secure-CRT fopen_s variant.
15382 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
15386 return VK_ERROR_INITIALIZATION_FAILED;
// Other platforms: plain fopen.
15390 m_File = fopen(settings.
pFilePath,
"wb");
15394 return VK_ERROR_INITIALIZATION_FAILED;
// CSV header: file type marker, then recording format version 1.8.
15399 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
15400 fprintf(m_File,
"%s\n",
"1,8");
// Destructor: closes the recording file if one was opened by Init().
15405 VmaRecorder::~VmaRecorder()
15407 if(m_File != VMA_NULL)
// Writes one CSV line logging vmaCreateAllocator (thread id, timestamp,
// frame index). The mutex serializes writers when recording is shared.
15413 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
15415 CallParams callParams;
15416 GetBasicParams(callParams);
15418 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15419 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Writes one CSV line logging vmaDestroyAllocator.
15423 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
15425 CallParams callParams;
15426 GetBasicParams(callParams);
15428 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15429 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Body of RecordCreatePool (signature lies above this excerpt): logs the
// pool create-info fields and the resulting pool handle as one CSV line.
15435 CallParams callParams;
15436 GetBasicParams(callParams);
15438 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15439 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line logging vmaDestroyPool with the pool handle.
15450 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
15452 CallParams callParams;
15453 GetBasicParams(callParams);
15455 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15456 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs vmaAllocateMemory: memory requirements, create-info fields, the
// resulting allocation handle, and the (stringified) user data.
15461 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
15462 const VkMemoryRequirements& vkMemReq,
15466 CallParams callParams;
15467 GetBasicParams(callParams);
15469 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// UserDataString renders pUserData as a string or pointer for the CSV.
15470 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15471 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15473 vkMemReq.alignment,
15474 vkMemReq.memoryTypeBits,
15482 userDataStr.GetString());
// Logs vmaAllocateMemoryPages: shared requirements/create-info, then the
// whole list of resulting allocation handles, then the user data string.
15486 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
15487 const VkMemoryRequirements& vkMemReq,
15489 uint64_t allocationCount,
15492 CallParams callParams;
15493 GetBasicParams(callParams);
15495 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15496 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
// Note: line is written in three parts — prefix, pointer list, suffix.
15497 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
15499 vkMemReq.alignment,
15500 vkMemReq.memoryTypeBits,
15507 PrintPointerList(allocationCount, pAllocations);
15508 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Logs vmaAllocateMemoryForBuffer, including the dedicated-allocation
// requirement/preference flags reported for the buffer.
15512 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
15513 const VkMemoryRequirements& vkMemReq,
15514 bool requiresDedicatedAllocation,
15515 bool prefersDedicatedAllocation,
15519 CallParams callParams;
15520 GetBasicParams(callParams);
15522 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15523 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15524 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15526 vkMemReq.alignment,
15527 vkMemReq.memoryTypeBits,
// Booleans serialized as 0/1 for the CSV format.
15528 requiresDedicatedAllocation ? 1 : 0,
15529 prefersDedicatedAllocation ? 1 : 0,
15537 userDataStr.GetString());
// Logs vmaAllocateMemoryForImage — same shape as the buffer variant.
15541 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
15542 const VkMemoryRequirements& vkMemReq,
15543 bool requiresDedicatedAllocation,
15544 bool prefersDedicatedAllocation,
15548 CallParams callParams;
15549 GetBasicParams(callParams);
15551 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15552 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15553 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15555 vkMemReq.alignment,
15556 vkMemReq.memoryTypeBits,
15557 requiresDedicatedAllocation ? 1 : 0,
15558 prefersDedicatedAllocation ? 1 : 0,
15566 userDataStr.GetString());
// Writes one CSV line logging vmaFreeMemory with the allocation handle.
15570 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
15573 CallParams callParams;
15574 GetBasicParams(callParams);
15576 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15577 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs vmaFreeMemoryPages: prefix, then the list of freed handles, then EOL.
15582 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
15583 uint64_t allocationCount,
15586 CallParams callParams;
15587 GetBasicParams(callParams);
15589 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15590 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
15591 PrintPointerList(allocationCount, pAllocations);
15592 fprintf(m_File,
"\n");
// Logs vmaSetAllocationUserData with the allocation handle and the new
// user data rendered as a string.
15596 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
15598 const void* pUserData)
15600 CallParams callParams;
15601 GetBasicParams(callParams);
15603 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15604 UserDataString userDataStr(
15607 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15609 userDataStr.GetString());
// Writes one CSV line logging vmaCreateLostAllocation.
15613 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
15616 CallParams callParams;
15617 GetBasicParams(callParams);
15619 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15620 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line logging vmaMapMemory with the allocation handle.
15625 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
15628 CallParams callParams;
15629 GetBasicParams(callParams);
15631 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15632 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line logging vmaUnmapMemory with the allocation handle.
15637 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
15640 CallParams callParams;
15641 GetBasicParams(callParams);
15643 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15644 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs vmaFlushAllocation with the allocation handle, offset and size.
15649 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
15650 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15652 CallParams callParams;
15653 GetBasicParams(callParams);
15655 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15656 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Logs vmaInvalidateAllocation with the allocation handle, offset and size.
15663 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
15664 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15666 CallParams callParams;
15667 GetBasicParams(callParams);
15669 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15670 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Logs vmaCreateBuffer: the VkBufferCreateInfo fields, the allocation
// create-info fields, and the resulting handles plus user data.
15677 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
15678 const VkBufferCreateInfo& bufCreateInfo,
15682 CallParams callParams;
15683 GetBasicParams(callParams);
15685 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15686 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15687 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15688 bufCreateInfo.flags,
15689 bufCreateInfo.size,
15690 bufCreateInfo.usage,
15691 bufCreateInfo.sharingMode,
15692 allocCreateInfo.
flags,
15693 allocCreateInfo.
usage,
15697 allocCreateInfo.
pool,
15699 userDataStr.GetString());
// Logs vmaCreateImage: the full VkImageCreateInfo (type, format, extent,
// mips, layers, samples, tiling, usage, sharing, layout), the allocation
// create-info fields, and the resulting handles plus user data.
15703 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
15704 const VkImageCreateInfo& imageCreateInfo,
15708 CallParams callParams;
15709 GetBasicParams(callParams);
15711 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15712 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15713 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15714 imageCreateInfo.flags,
15715 imageCreateInfo.imageType,
15716 imageCreateInfo.format,
15717 imageCreateInfo.extent.width,
15718 imageCreateInfo.extent.height,
15719 imageCreateInfo.extent.depth,
15720 imageCreateInfo.mipLevels,
15721 imageCreateInfo.arrayLayers,
15722 imageCreateInfo.samples,
15723 imageCreateInfo.tiling,
15724 imageCreateInfo.usage,
15725 imageCreateInfo.sharingMode,
15726 imageCreateInfo.initialLayout,
15727 allocCreateInfo.
flags,
15728 allocCreateInfo.
usage,
15732 allocCreateInfo.
pool,
15734 userDataStr.GetString());
// Writes one CSV line logging vmaDestroyBuffer with the allocation handle.
15738 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
15741 CallParams callParams;
15742 GetBasicParams(callParams);
15744 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15745 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line logging vmaDestroyImage with the allocation handle.
15750 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
15753 CallParams callParams;
15754 GetBasicParams(callParams);
15756 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15757 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line logging vmaTouchAllocation.
15762 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
15765 CallParams callParams;
15766 GetBasicParams(callParams);
15768 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15769 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line logging vmaGetAllocationInfo.
15774 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
15777 CallParams callParams;
15778 GetBasicParams(callParams);
15780 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15781 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line logging vmaMakePoolAllocationsLost with the pool.
15786 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
15789 CallParams callParams;
15790 GetBasicParams(callParams);
15792 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15793 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs vmaDefragmentationBegin: prefix, then (per the three-part fprintf)
// list-style fields, then the numeric limits and the returned handles.
15798 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
15802 CallParams callParams;
15803 GetBasicParams(callParams);
15805 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15806 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
15809 fprintf(m_File,
",");
15811 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Writes one CSV line logging vmaDefragmentationEnd with the context handle.
15821 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
15824 CallParams callParams;
15825 GetBasicParams(callParams);
15827 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15828 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs vmaSetPoolName; a null name is recorded as an empty string.
15833 void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
15837 CallParams callParams;
15838 GetBasicParams(callParams);
15840 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15841 fprintf(m_File,
"%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15842 pool, name != VMA_NULL ? name :
"");
// Body of UserDataString's constructor (signature lies above this excerpt):
// if user data is present it is either used directly as a C string
// (string-flag path, presumably gated on the allocation flags — confirm in
// the full file) or formatted as a pointer value into the small buffer.
15848 if(pUserData != VMA_NULL)
15852 m_Str = (
const char*)pUserData;
// 17 bytes: "0x" + 16 hex digits worst case is not needed here — %p output
// is truncated to fit the fixed-size buffer, always NUL-terminated.
15857 snprintf(m_PtrStr, 17,
"%p", pUserData);
15867 void VmaRecorder::WriteConfiguration(
15868 const VkPhysicalDeviceProperties& devProps,
15869 const VkPhysicalDeviceMemoryProperties& memProps,
15870 uint32_t vulkanApiVersion,
15871 bool dedicatedAllocationExtensionEnabled,
15872 bool bindMemory2ExtensionEnabled,
15873 bool memoryBudgetExtensionEnabled,
15874 bool deviceCoherentMemoryExtensionEnabled)
15876 fprintf(m_File,
"Config,Begin\n");
15878 fprintf(m_File,
"VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));
15880 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
15881 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
15882 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
15883 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
15884 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
15885 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
15887 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
15888 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
15889 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
15891 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
15892 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
15894 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
15895 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
15897 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
15898 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
15900 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
15901 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
15904 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
15905 fprintf(m_File,
"Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
15906 fprintf(m_File,
"Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
15907 fprintf(m_File,
"Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);
15909 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
15910 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
15911 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
15912 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
15913 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
15914 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
15915 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
15916 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
15917 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
15919 fprintf(m_File,
"Config,End\n");
15922 void VmaRecorder::GetBasicParams(CallParams& outParams)
15924 #if defined(_WIN32)
15925 outParams.threadId = GetCurrentThreadId();
15930 std::thread::id thread_id = std::this_thread::get_id();
15931 std::stringstream thread_id_to_string_converter;
15932 thread_id_to_string_converter << thread_id;
15933 std::string thread_id_as_string = thread_id_to_string_converter.str();
15934 outParams.threadId =
static_cast<uint32_t
>(std::stoi(thread_id_as_string.c_str()));
15937 auto current_time = std::chrono::high_resolution_clock::now();
15939 outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
15942 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
15946 fprintf(m_File,
"%p", pItems[0]);
15947 for(uint64_t i = 1; i < count; ++i)
15949 fprintf(m_File,
" %p", pItems[i]);
15954 void VmaRecorder::Flush()
15967 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
15968 m_Allocator(pAllocationCallbacks, 1024)
15972 template<
typename... Types>
VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
15974 VmaMutexLock mutexLock(m_Mutex);
15975 return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
15978 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
15980 VmaMutexLock mutexLock(m_Mutex);
15981 m_Allocator.Free(hAlloc);
// VmaAllocator_T constructor (fragment).
// NOTE(review): this span is a truncated extraction — the constructor's
// signature, several member initializers, flag-based `m_UseKhr*` setup,
// braces, #else/#endif lines, and most call-argument lists were lost.
// The surviving lines are kept byte-identical below; do not treat this as
// compilable code without restoring the missing lines from upstream.
15989 m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
15996 m_hDevice(pCreateInfo->device),
15997 m_hInstance(pCreateInfo->instance),
15998 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
15999 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
16000 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
16001 m_AllocationObjectAllocator(&m_AllocationCallbacks),
16002 m_HeapSizeLimitMask(0),
16003 m_DeviceMemoryCount(0),
16004 m_PreferredLargeHeapBlockSize(0),
16005 m_PhysicalDevice(pCreateInfo->physicalDevice),
16006 m_CurrentFrameIndex(0),
16007 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
16009 m_GlobalMemoryTypeBits(UINT32_MAX)
16011 ,m_pRecorder(VMA_NULL)
// On Vulkan >= 1.1 the dedicated-allocation and bind-memory2 extensions are
// core, so the per-extension flags are cleared (core paths are used instead).
16014 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16016 m_UseKhrDedicatedAllocation =
false;
16017 m_UseKhrBindMemory2 =
false;
16020 if(VMA_DEBUG_DETECT_CORRUPTION)
// Corruption detection writes uint32_t markers into the debug margin,
// so the margin must be a multiple of 4 bytes.
16023 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
16028 if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
// Validate that requested create-flags are supported by the compiled-in
// feature macros; each mismatch is a hard assert.
16030 #if !(VMA_DEDICATED_ALLOCATION)
16033 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
16036 #if !(VMA_BIND_MEMORY2)
16039 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
16043 #if !(VMA_MEMORY_BUDGET)
16046 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
16049 #if !(VMA_BUFFER_DEVICE_ADDRESS)
16050 if(m_UseKhrBufferDeviceAddress)
16052 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
16055 #if VMA_VULKAN_VERSION < 1002000
16056 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
16058 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
16061 #if VMA_VULKAN_VERSION < 1001000
16062 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16064 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
16067 #if !(VMA_MEMORY_PRIORITY)
16068 if(m_UseExtMemoryPriority)
16070 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
// Zero all POD member aggregates before querying device properties.
16074 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
16075 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
16076 memset(&m_MemProps, 0,
sizeof(m_MemProps));
16078 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
16079 memset(&m_VulkanFunctions, 0,
sizeof(m_VulkanFunctions));
16090 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
16091 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Sanity-check that alignment-related values are powers of two, as required
// by the internal alignment arithmetic.
16093 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
16094 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
16095 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
16096 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
16101 m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
// Apply user-provided per-heap size limits by shrinking the reported heap sizes.
16105 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
16107 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
16108 if(limit != VK_WHOLE_SIZE)
16110 m_HeapSizeLimitMask |= 1u << heapIndex;
16111 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
16113 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create one default block vector per memory type.
16119 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16121 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
16123 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
16127 preferredBlockSize,
16130 GetBufferImageGranularity(),
16142 VkResult res = VK_SUCCESS;
// Optional call recording: write the configuration header and the
// CreateAllocator record, or fail if recording support was compiled out.
16147 #if VMA_RECORDING_ENABLED
16148 m_pRecorder = vma_new(
this, VmaRecorder)();
16150 if(res != VK_SUCCESS)
16154 m_pRecorder->WriteConfiguration(
16155 m_PhysicalDeviceProperties,
16157 m_VulkanApiVersion,
16158 m_UseKhrDedicatedAllocation,
16159 m_UseKhrBindMemory2,
16160 m_UseExtMemoryBudget,
16161 m_UseAmdDeviceCoherentMemory);
16162 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
16164 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
16165 return VK_ERROR_FEATURE_NOT_PRESENT;
16169 #if VMA_MEMORY_BUDGET
16170 if(m_UseExtMemoryBudget)
16172 UpdateVulkanBudget();
16179 VmaAllocator_T::~VmaAllocator_T()
16181 #if VMA_RECORDING_ENABLED
16182 if(m_pRecorder != VMA_NULL)
16184 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
16185 vma_delete(
this, m_pRecorder);
16189 VMA_ASSERT(m_Pools.IsEmpty());
16191 for(
size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
16193 if(!m_DedicatedAllocations[memTypeIndex].IsEmpty())
16195 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
16198 vma_delete(
this, m_pBlockVectors[memTypeIndex]);
16202 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
16204 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
16205 ImportVulkanFunctions_Static();
16208 if(pVulkanFunctions != VMA_NULL)
16210 ImportVulkanFunctions_Custom(pVulkanFunctions);
16213 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
16214 ImportVulkanFunctions_Dynamic();
16217 ValidateVulkanFunctions();
#if VMA_STATIC_VULKAN_FUNCTIONS == 1

// Fills m_VulkanFunctions from the statically linked Vulkan loader symbols.
// Restored from truncated extraction: missing braces and closing #endif lines.
void VmaAllocator_T::ImportVulkanFunctions_Static()
{
    // Vulkan 1.0 core.
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;

    // Vulkan 1.1 core versions of the KHR extension entry points; stored in the
    // *KHR members so the rest of the code has a single call path.
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
        m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
        m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
    }
#endif
}

#endif // VMA_STATIC_VULKAN_FUNCTIONS == 1
16258 void VmaAllocator_T::ImportVulkanFunctions_Custom(
const VmaVulkanFunctions* pVulkanFunctions)
16260 VMA_ASSERT(pVulkanFunctions != VMA_NULL);
16262 #define VMA_COPY_IF_NOT_NULL(funcName) \
16263 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
16265 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
16266 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
16267 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
16268 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
16269 VMA_COPY_IF_NOT_NULL(vkMapMemory);
16270 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
16271 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
16272 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
16273 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
16274 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
16275 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
16276 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
16277 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
16278 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
16279 VMA_COPY_IF_NOT_NULL(vkCreateImage);
16280 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
16281 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
16283 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16284 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
16285 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
16288 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16289 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
16290 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
16293 #if VMA_MEMORY_BUDGET
16294 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
16297 #undef VMA_COPY_IF_NOT_NULL
#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1

// Fetches any still-null function pointers via vkGetInstanceProcAddr /
// vkGetDeviceProcAddr. Core 1.1 names are fetched when the API version allows;
// otherwise the *KHR extension names are fetched when the corresponding
// extension is enabled.
// Restored from truncated extraction: missing braces, #else/#endif lines,
// and the closing brace of the function.
void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
{
    // Only fill slots that are still null, so user-supplied pointers win.
#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);

    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
    VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
    VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
    VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
    VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
    VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
    VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
    VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
    VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
    VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
    VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
    VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");

#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2");
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif

#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
    }
#endif

#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
    }
#endif

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif

#undef VMA_FETCH_DEVICE_FUNC
#undef VMA_FETCH_INSTANCE_FUNC
}

#endif // VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
16371 void VmaAllocator_T::ValidateVulkanFunctions()
16373 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
16374 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
16375 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
16376 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
16377 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
16378 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
16379 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
16380 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
16381 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
16382 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
16383 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
16384 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
16385 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
16386 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
16387 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
16388 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
16389 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
16391 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16392 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
16394 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
16395 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
16399 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16400 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
16402 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
16403 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
16407 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
16408 if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16410 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
16415 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
16417 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16418 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
16419 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
16420 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
// Allocates memory from one specific memory type: tries dedicated allocation
// first when preferred, otherwise sub-allocates from the type's block vector,
// then falls back to dedicated allocation on failure (fragment).
// NOTE(review): truncated extraction — parameters (size, createInfo,
// pAllocations), the finalCreateInfo setup, most call-argument lists, and
// braces were lost. Surviving lines kept byte-identical; restore from
// upstream before compiling.
16423 VkResult VmaAllocator_T::AllocateMemoryOfType(
16425 VkDeviceSize alignment,
16426 bool dedicatedAllocation,
16427 VkBuffer dedicatedBuffer,
16428 VkBufferUsageFlags dedicatedBufferUsage,
16429 VkImage dedicatedImage,
16431 uint32_t memTypeIndex,
16432 VmaSuballocationType suballocType,
16433 size_t allocationCount,
16436 VMA_ASSERT(pAllocations != VMA_NULL);
16437 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// Mapping only makes sense on HOST_VISIBLE memory; related flag is dropped otherwise.
16443 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16453 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
16454 VMA_ASSERT(blockVector);
16456 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: requests larger than half a block go straight to dedicated memory.
16457 bool preferDedicatedMemory =
16458 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
16459 dedicatedAllocation ||
16461 size > preferredBlockSize / 2;
16463 if(preferDedicatedMemory &&
16465 finalCreateInfo.
pool == VK_NULL_HANDLE)
16474 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16478 return AllocateDedicatedMemory(
16488 dedicatedBufferUsage,
// Primary path: sub-allocate from the default block vector of this memory type.
16496 VkResult res = blockVector->Allocate(
16497 m_CurrentFrameIndex.load(),
16504 if(res == VK_SUCCESS)
16512 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Guard against approaching maxMemoryAllocationCount before the dedicated fallback.
16518 if(m_DeviceMemoryCount.load() > m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount * 3 / 4)
16520 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16523 res = AllocateDedicatedMemory(
16533 dedicatedBufferUsage,
16537 if(res == VK_SUCCESS)
16540 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
16546 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates `allocationCount` separate VkDeviceMemory objects (dedicated
// allocations), chaining dedicated-allocation / allocate-flags / priority
// structs onto the VkMemoryAllocateInfo pNext chain as enabled; registers the
// results and rolls back all pages on partial failure (fragment).
// NOTE(review): truncated extraction — parameters (size, pUserData, priority,
// pAllocations), the budget-check flag condition, loop braces, the rollback
// loop's currAlloc setup, and call-argument lists were lost. Surviving lines
// kept byte-identical; restore from upstream before compiling.
16552 VkResult VmaAllocator_T::AllocateDedicatedMemory(
16554 VmaSuballocationType suballocType,
16555 uint32_t memTypeIndex,
16558 bool isUserDataString,
16561 VkBuffer dedicatedBuffer,
16562 VkBufferUsageFlags dedicatedBufferUsage,
16563 VkImage dedicatedImage,
16564 size_t allocationCount,
16567 VMA_ASSERT(allocationCount > 0 && pAllocations);
// Budget check: reject up-front if the whole batch would exceed the heap budget.
16571 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16573 GetBudget(&heapBudget, heapIndex, 1);
16574 if(heapBudget.
usage + size * allocationCount > heapBudget.
budget)
16576 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16580 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
16581 allocInfo.memoryTypeIndex = memTypeIndex;
16582 allocInfo.allocationSize = size;
16584 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16585 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
16586 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16588 if(dedicatedBuffer != VK_NULL_HANDLE)
16590 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
16591 dedicatedAllocInfo.buffer = dedicatedBuffer;
16592 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
16594 else if(dedicatedImage != VK_NULL_HANDLE)
16596 dedicatedAllocInfo.image = dedicatedImage;
16597 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
16602 #if VMA_BUFFER_DEVICE_ADDRESS
16603 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
16604 if(m_UseKhrBufferDeviceAddress)
16606 bool canContainBufferWithDeviceAddress =
true;
16607 if(dedicatedBuffer != VK_NULL_HANDLE)
// UINT32_MAX means "usage unknown" — conservatively assume device address may be needed.
16609 canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX ||
16610 (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
16612 else if(dedicatedImage != VK_NULL_HANDLE)
16614 canContainBufferWithDeviceAddress =
false;
16616 if(canContainBufferWithDeviceAddress)
16618 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
16619 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
16624 #if VMA_MEMORY_PRIORITY
16625 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
16626 if(m_UseExtMemoryPriority)
16628 priorityInfo.priority = priority;
16629 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
// Allocate page by page; stop at the first failure.
16634 VkResult res = VK_SUCCESS;
16635 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16637 res = AllocateDedicatedMemoryPage(
16645 pAllocations + allocIndex);
16646 if(res != VK_SUCCESS)
16652 if(res == VK_SUCCESS)
// Register all pages in the per-memory-type dedicated allocation list.
16656 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
16657 DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
16658 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16660 dedicatedAllocations.PushBack(pAllocations[allocIndex]);
16664 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Rollback: free every page allocated before the failure.
16669 while(allocIndex--)
16672 VkDeviceMemory hMemory = currAlloc->GetMemory();
16684 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
16685 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
16686 currAlloc->SetUserData(
this, VMA_NULL);
16687 m_AllocationObjectAllocator.Free(currAlloc);
16690 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single dedicated VkDeviceMemory page, optionally persistently
// maps it, and wraps it in a VmaAllocation object (fragment).
// NOTE(review): truncated extraction — parameters (size, map, pUserData,
// pAllocation), the vkMapMemory argument list, early-return paths, and braces
// were lost. Surviving lines kept byte-identical; restore from upstream
// before compiling.
16696 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
16698 VmaSuballocationType suballocType,
16699 uint32_t memTypeIndex,
16700 const VkMemoryAllocateInfo& allocInfo,
16702 bool isUserDataString,
16706 VkDeviceMemory hMemory = VK_NULL_HANDLE;
16707 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
16710 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
16714 void* pMappedData = VMA_NULL;
16717 res = (*m_VulkanFunctions.vkMapMemory)(
16726 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// On map failure the freshly allocated device memory is released again.
16727 FreeVulkanMemory(memTypeIndex, size, hMemory);
16732 *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
16733 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
16734 (*pAllocation)->SetUserData(
this, pUserData);
16735 m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
16736 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
16738 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
16744 void VmaAllocator_T::GetBufferMemoryRequirements(
16746 VkMemoryRequirements& memReq,
16747 bool& requiresDedicatedAllocation,
16748 bool& prefersDedicatedAllocation)
const
16750 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16751 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16753 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
16754 memReqInfo.buffer = hBuffer;
16756 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16758 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16759 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16761 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16763 memReq = memReq2.memoryRequirements;
16764 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16765 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
16770 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
16771 requiresDedicatedAllocation =
false;
16772 prefersDedicatedAllocation =
false;
16776 void VmaAllocator_T::GetImageMemoryRequirements(
16778 VkMemoryRequirements& memReq,
16779 bool& requiresDedicatedAllocation,
16780 bool& prefersDedicatedAllocation)
const
16782 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16783 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16785 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
16786 memReqInfo.image = hImage;
16788 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16790 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16791 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16793 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16795 memReq = memReq2.memoryRequirements;
16796 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16797 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
16802 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
16803 requiresDedicatedAllocation =
false;
16804 prefersDedicatedAllocation =
false;
// Top-level allocation entry point: validates the create-info flag
// combinations, routes pool allocations to the pool's block vector, and
// otherwise iterates candidate memory types (best first), retrying with the
// next type on failure (fragment).
// NOTE(review): truncated extraction — parameters (createInfo, suballocType
// ordering, pAllocations), several flag-test conditions, the
// FindMemoryTypeIndex calls, call-argument lists, and braces were lost.
// Surviving lines kept byte-identical; restore from upstream before compiling.
16808 VkResult VmaAllocator_T::AllocateMemory(
16809 const VkMemoryRequirements& vkMemReq,
16810 bool requiresDedicatedAllocation,
16811 bool prefersDedicatedAllocation,
16812 VkBuffer dedicatedBuffer,
16813 VkBufferUsageFlags dedicatedBufferUsage,
16814 VkImage dedicatedImage,
16816 VmaSuballocationType suballocType,
16817 size_t allocationCount,
16820 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
16822 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
16824 if(vkMemReq.size == 0)
16826 return VK_ERROR_VALIDATION_FAILED_EXT;
// Mutually exclusive / invalid flag combinations are rejected with asserts.
16831 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
16832 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16837 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
16838 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16840 if(requiresDedicatedAllocation)
16844 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
16845 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16847 if(createInfo.
pool != VK_NULL_HANDLE)
16849 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
16850 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16853 if((createInfo.
pool != VK_NULL_HANDLE) &&
16856 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
16857 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: allocate directly from the pool's block vector.
16860 if(createInfo.
pool != VK_NULL_HANDLE)
16862 const VkDeviceSize alignmentForPool = VMA_MAX(
16863 vkMemReq.alignment,
16864 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
16869 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16874 return createInfo.
pool->m_BlockVector.Allocate(
16875 m_CurrentFrameIndex.load(),
// Default path: pick the best memory type, then fall back to the next
// candidate (clearing the failed type's bit) while allocation fails.
16886 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
16887 uint32_t memTypeIndex = UINT32_MAX;
16889 if(res == VK_SUCCESS)
16891 VkDeviceSize alignmentForMemType = VMA_MAX(
16892 vkMemReq.alignment,
16893 GetMemoryTypeMinAlignment(memTypeIndex));
16895 res = AllocateMemoryOfType(
16897 alignmentForMemType,
16898 requiresDedicatedAllocation || prefersDedicatedAllocation,
16900 dedicatedBufferUsage,
16908 if(res == VK_SUCCESS)
16918 memoryTypeBits &= ~(1u << memTypeIndex);
16921 if(res == VK_SUCCESS)
16923 alignmentForMemType = VMA_MAX(
16924 vkMemReq.alignment,
16925 GetMemoryTypeMinAlignment(memTypeIndex));
16927 res = AllocateMemoryOfType(
16929 alignmentForMemType,
16930 requiresDedicatedAllocation || prefersDedicatedAllocation,
16932 dedicatedBufferUsage,
16940 if(res == VK_SUCCESS)
16950 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a batch of allocations in reverse order: optionally fills freed memory
// with the debug pattern, dispatches block vs. dedicated allocations to the
// right free path, then updates the budget and destroys the allocation
// objects (fragment).
// NOTE(review): truncated extraction — the pAllocations parameter, the
// per-iteration `allocation` variable setup, the lost-allocation branch,
// recorder call, switch/case braces, and `break`s were lost. Surviving lines
// kept byte-identical; restore from upstream before compiling.
16961 void VmaAllocator_T::FreeMemory(
16962 size_t allocationCount,
16965 VMA_ASSERT(pAllocations);
16967 for(
size_t allocIndex = allocationCount; allocIndex--; )
16971 if(allocation != VK_NULL_HANDLE)
// TouchAllocation returning true means the allocation is not lost.
16973 if(TouchAllocation(allocation))
16975 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
16977 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED)
;
16980 switch(allocation->GetType())
16982 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
16984 VmaBlockVector* pBlockVector = VMA_NULL;
16985 VmaPool hPool = allocation->GetBlock()->GetParentPool();
16986 if(hPool != VK_NULL_HANDLE)
16988 pBlockVector = &hPool->m_BlockVector;
16992 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
16993 pBlockVector = m_pBlockVectors[memTypeIndex];
16995 pBlockVector->Free(allocation);
16998 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
16999 FreeDedicatedMemory(allocation);
17007 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
17008 allocation->SetUserData(
this, VMA_NULL);
17009 m_AllocationObjectAllocator.Free(allocation);
// Aggregates allocator-wide statistics into *pStats: initializes all stat
// infos, sums stats from the default per-memory-type block vectors, from all
// custom pools (under the pools read lock), and from every dedicated
// allocation (per-type read locks), then post-processes totals and the
// per-type / per-heap breakdowns. (Some lines are elided in this extract.)
17014 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
17017 InitStatInfo(pStats->
total);
// Zero-init every per-type and per-heap entry before accumulating.
17018 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
17020 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
17024 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17026 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
17027 VMA_ASSERT(pBlockVector);
17028 pBlockVector->AddStats(pStats);
// Custom pools, iterated under a shared (read) lock.
17033 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
17034 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
17036 pool->m_BlockVector.AddStats(pStats);
// Dedicated allocations: each contributes to total, its memory type and heap.
17041 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17043 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
17044 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17045 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
17047 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
17050 alloc->DedicatedAllocCalcStatsInfo(allocationStatInfo);
17051 VmaAddStatInfo(pStats->
total, allocationStatInfo);
17052 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
17053 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/min/max after all sums are in.
17058 VmaPostprocessCalcStatInfo(pStats->
total);
17059 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
17060 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
17061 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
17062 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// Fills outBudget[0..heapCount) for heaps [firstHeap, firstHeap+heapCount).
// With VK_EXT_memory_budget enabled: uses cached driver-reported usage/budget
// if fetched recently (< 30 allocator operations ago); otherwise refreshes
// via UpdateVulkanBudget() and retries. The usage estimate is corrected by
// block bytes allocated since the last fetch. Without the extension
// (fallback path at 17104+): usage is derived from our own block bytes and
// budget is estimated as 80% of the heap size.
17065 void VmaAllocator_T::GetBudget(
VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
17067 #if VMA_MEMORY_BUDGET
17068 if(m_UseExtMemoryBudget)
// Cached driver data is considered fresh for up to 30 operations.
17070 if(m_Budget.m_OperationsSinceBudgetFetch < 30)
17072 VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
17073 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
17075 const uint32_t heapIndex = firstHeap + i;
17077 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
// Add bytes we allocated since the driver snapshot was taken.
17080 if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->
blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
17082 outBudget->
usage = m_Budget.m_VulkanUsage[heapIndex] +
17083 outBudget->
blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
17087 outBudget->
usage = 0;
// Clamp reported budget to the physical heap size.
17091 outBudget->
budget = VMA_MIN(
17092 m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
// Stale cache: refresh from the driver, then recurse once with fresh data.
17097 UpdateVulkanBudget();
17098 GetBudget(outBudget, firstHeap, heapCount);
// Fallback path (extension unavailable): self-tracked bytes only.
17104 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
17106 const uint32_t heapIndex = firstHeap + i;
17108 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
// Heuristic: assume 80% of the heap is usable.
17112 outBudget->
budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
// 4098 == 0x1002, the PCI vendor ID of AMD.
17117 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Starts a defragmentation operation: creates a VmaDefragmentationContext_T,
// registers the caller's allocations with it and runs the first Defragment()
// step. If the result is anything other than VK_NOT_READY (i.e. finished or
// failed immediately), the context is destroyed and *pContext reset to null.
17119 VkResult VmaAllocator_T::DefragmentationBegin(
17129 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
17130 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
17133 (*pContext)->AddAllocations(
17136 VkResult res = (*pContext)->Defragment(
// VK_NOT_READY means more passes are needed; anything else ends the operation.
17141 if(res != VK_NOT_READY)
17143 vma_delete(
this, *pContext);
17144 *pContext = VMA_NULL;
// Finishes a defragmentation operation by destroying its context.
17150 VkResult VmaAllocator_T::DefragmentationEnd(
17153 vma_delete(
this, context);
// Thin forwarders to the context for incremental (pass-based) defragmentation.
17157 VkResult VmaAllocator_T::DefragmentationPassBegin(
17161 return context->DefragmentPassBegin(pInfo);
17163 VkResult VmaAllocator_T::DefragmentationPassEnd(
17166 return context->DefragmentPassEnd();
// Interior of GetAllocationInfo (entry lines elided in this extract): fills
// *pAllocationInfo for hAllocation. For allocations that can become lost it
// spins on a compare-exchange of the last-use frame index: a lost allocation
// reports null memory/zero offset, an up-to-date one reports real values,
// otherwise the loop retries after bumping the frame index.
17172 if(hAllocation->CanBecomeLost())
17178 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17179 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Allocation already lost: report empty info (size/userData still valid).
17182 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
17186 pAllocationInfo->
offset = 0;
17187 pAllocationInfo->
size = hAllocation->GetSize();
17189 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report real values.
17192 else if(localLastUseFrameIndex == localCurrFrameIndex)
17194 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
17195 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
17196 pAllocationInfo->
offset = hAllocation->GetOffset();
17197 pAllocationInfo->
size = hAllocation->GetSize();
17199 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Try to claim the current frame; on failure another thread raced us — retry.
17204 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17206 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path: in stats builds, still advance the last-use frame index.
17213 #if VMA_STATS_STRING_ENABLED
17214 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17215 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17218 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
17219 if(localLastUseFrameIndex == localCurrFrameIndex)
17225 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17227 localLastUseFrameIndex = localCurrFrameIndex;
// Unconditional fill of the output struct for non-lost-capable allocations.
17233 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
17234 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
17235 pAllocationInfo->
offset = hAllocation->GetOffset();
17236 pAllocationInfo->
size = hAllocation->GetSize();
17237 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
17238 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks hAllocation as used in the current frame. Returns true (in elided
// lines) if the allocation is still valid, false if it is lost. Uses the
// same compare-exchange retry loop as GetAllocationInfo above.
17242 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
17245 if(hAllocation->CanBecomeLost())
17247 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17248 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17251 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
17255 else if(localLastUseFrameIndex == localCurrFrameIndex)
17261 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17263 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable path: stats builds still record the touch.
17270 #if VMA_STATS_STRING_ENABLED
17271 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17272 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17275 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
17276 if(localLastUseFrameIndex == localCurrFrameIndex)
17282 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17284 localLastUseFrameIndex = localCurrFrameIndex;
// Interior of CreatePool (entry lines elided): validates the create info,
// rejects memory types excluded by the global memory-type mask, computes the
// preferred block size, constructs the VmaPool_T, pre-creates its minimum
// blocks, assigns an ID and links it into m_Pools under the write lock.
17296 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
17306 return VK_ERROR_INITIALIZATION_FAILED;
// Memory type filtered out globally (e.g. AMD device-coherent not enabled).
17310 ((1u << pCreateInfo->
memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
17312 return VK_ERROR_FEATURE_NOT_PRESENT;
17315 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
17317 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Pre-allocate the pool's minimum block count; roll back on failure.
17319 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
17320 if(res != VK_SUCCESS)
17322 vma_delete(
this, *pPool);
// Register the new pool under the exclusive pools lock.
17329 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
17330 (*pPool)->SetId(m_NextPoolId++);
17331 m_Pools.PushBack(*pPool);
// Unlinks the pool from m_Pools (under the write lock) and destroys it.
17337 void VmaAllocator_T::DestroyPool(
VmaPool pool)
17341 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
17342 m_Pools.Remove(pool);
17345 vma_delete(
this, pool);
// Fragment of GetPoolStats: delegates to the pool's block vector.
17350 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Stores the application's current frame index (used for lost-allocation
// tracking) and, when VK_EXT_memory_budget is in use, refreshes the cached
// budget once per frame.
17353 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
17355 m_CurrentFrameIndex.store(frameIndex);
17357 #if VMA_MEMORY_BUDGET
17358 if(m_UseExtMemoryBudget)
17360 UpdateVulkanBudget();
// Marks stale allocations in the given pool as lost, reporting how many.
17365 void VmaAllocator_T::MakePoolAllocationsLost(
17367 size_t* pLostAllocationCount)
17369 hPool->m_BlockVector.MakePoolAllocationsLost(
17370 m_CurrentFrameIndex.load(),
17371 pLostAllocationCount);
// Margin-based corruption check for a single custom pool.
17374 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
17376 return hPool->m_BlockVector.CheckCorruption();
// Runs the corruption check over every default block vector and custom pool
// whose memory type is selected by memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT (nothing checkable) and upgrades to
// VK_SUCCESS as soon as any vector actually performs the check; other error
// codes are handled in lines elided from this extract.
17379 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
17381 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default per-memory-type block vectors.
17384 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17386 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
17388 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
17389 VMA_ASSERT(pBlockVector);
17390 VkResult localRes = pBlockVector->CheckCorruption();
17393 case VK_ERROR_FEATURE_NOT_PRESENT:
17396 finalRes = VK_SUCCESS;
// Custom pools, iterated under the shared pools lock.
17406 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
17407 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
17409 if(((1u << pool->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
17411 VkResult localRes = pool->m_BlockVector.CheckCorruption();
17414 case VK_ERROR_FEATURE_NOT_PRESENT:
17417 finalRes = VK_SUCCESS;
// Creates a dummy allocation pre-marked as lost (frame index VMA_FRAME_INDEX_LOST).
17429 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
17431 *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST,
false);
17432 (*pAllocation)->InitLost();
// RAII helper for a transactional atomic counter increment: Increment()
// bumps the counter and remembers it; the destructor rolls the increment
// back (in lines elided here) unless Commit() — which clears m_Atomic — was
// called first.
17436 template<
typename T>
17437 struct AtomicTransactionalIncrement
17440 typedef std::atomic<T> AtomicT;
// Destructor: if not committed, undoes the increment (body elided).
17441 ~AtomicTransactionalIncrement()
// Performs the increment and returns the PREVIOUS value (fetch_add semantics).
17446 T Increment(AtomicT* atomic)
17449 return m_Atomic->fetch_add(1);
// Commit: forget the counter so the destructor performs no rollback.
17453 m_Atomic =
nullptr;
17457 AtomicT* m_Atomic =
nullptr;
// Wrapper around vkAllocateMemory that enforces maxMemoryAllocationCount
// (when VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT is on), enforces
// optional per-heap size limits via a CAS loop on m_BlockBytes, fires the
// user's pfnAllocate device-memory callback, and keeps budget counters
// consistent on failure (rollback at 17515).
17460 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
// Transactional count: rolled back automatically unless Commit() is reached.
17462 AtomicTransactionalIncrement<uint32_t> deviceMemoryCountIncrement;
17463 const uint64_t prevDeviceMemoryCount = deviceMemoryCountIncrement.Increment(&m_DeviceMemoryCount);
17464 #if VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
17465 if(prevDeviceMemoryCount >= m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount)
17467 return VK_ERROR_TOO_MANY_OBJECTS;
17471 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Heap has a user-imposed size limit: reserve the bytes with a CAS loop.
17474 if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
17476 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
17477 VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
17480 const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
17481 if(blockBytesAfterAllocation > heapSize)
17483 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// CAS succeeds -> bytes reserved; failure (elided) reloads and retries.
17485 if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
// No heap limit: plain atomic add is sufficient.
17493 m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
17497 VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
17499 if(res == VK_SUCCESS)
17501 #if VMA_MEMORY_BUDGET
17502 ++m_Budget.m_OperationsSinceBudgetFetch;
// Notify the user's device-memory allocation callback, if installed.
17506 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
17508 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.
pUserData);
17511 deviceMemoryCountIncrement.Commit();
// vkAllocateMemory failed: release the bytes reserved above.
17515 m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
// Counterpart of AllocateVulkanMemory: fires pfnFree, calls vkFreeMemory,
// and decrements the budget byte count and device-memory object count.
17521 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
17524 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
17526 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.
pUserData);
17530 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
17532 m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
17534 --m_DeviceMemoryCount;
// Binds a buffer to device memory. If a pNext chain is supplied it requires
// vkBindBufferMemory2 (core 1.1 or VK_KHR_bind_memory2) since the plain
// vkBindBufferMemory cannot carry extension structs; otherwise falls back to
// the plain bind at 17564.
17537 VkResult VmaAllocator_T::BindVulkanBuffer(
17538 VkDeviceMemory memory,
17539 VkDeviceSize memoryOffset,
17543 if(pNext != VMA_NULL)
17545 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17546 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17547 m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
17549 VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
17550 bindBufferMemoryInfo.pNext = pNext;
17551 bindBufferMemoryInfo.buffer = buffer;
17552 bindBufferMemoryInfo.memory = memory;
17553 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17554 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
// pNext requested but bind_memory2 unavailable: cannot honor the chain.
17559 return VK_ERROR_EXTENSION_NOT_PRESENT;
17564 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
// Image counterpart of BindVulkanBuffer; identical structure using
// vkBindImageMemory2 / vkBindImageMemory.
17568 VkResult VmaAllocator_T::BindVulkanImage(
17569 VkDeviceMemory memory,
17570 VkDeviceSize memoryOffset,
17574 if(pNext != VMA_NULL)
17576 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17577 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17578 m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
17580 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
17581 bindBufferMemoryInfo.pNext = pNext;
17582 bindBufferMemoryInfo.image = image;
17583 bindBufferMemoryInfo.memory = memory;
17584 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17585 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
17590 return VK_ERROR_EXTENSION_NOT_PRESENT;
17595 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
// Maps an allocation into host address space. Lost-capable allocations
// cannot be mapped. For block sub-allocations it maps the whole block
// (reference-counted) and offsets the returned pointer; for dedicated
// allocations it maps the VkDeviceMemory directly.
17599 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
17601 if(hAllocation->CanBecomeLost())
17603 return VK_ERROR_MEMORY_MAP_FAILED;
17606 switch(hAllocation->GetType())
17608 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17610 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17611 char *pBytes = VMA_NULL;
17612 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
17613 if(res == VK_SUCCESS)
// Offset into the block mapping to reach this allocation's bytes.
17615 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
17616 hAllocation->BlockAllocMap();
17620 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17621 return hAllocation->DedicatedAllocMap(
this, ppData);
17624 return VK_ERROR_MEMORY_MAP_FAILED;
// Interior of Unmap (entry lines elided): releases one map reference on the
// owning block, or unmaps dedicated memory.
17630 switch(hAllocation->GetType())
17632 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17634 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17635 hAllocation->BlockAllocUnmap();
17636 pBlock->Unmap(
this, 1);
17639 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17640 hAllocation->DedicatedAllocUnmap(
this);
// Binds hBuffer at allocationLocalOffset within the allocation: dedicated
// allocations bind directly via BindVulkanBuffer; block sub-allocations
// delegate to the block (which serializes binds and adds the block offset).
17647 VkResult VmaAllocator_T::BindBufferMemory(
17649 VkDeviceSize allocationLocalOffset,
17653 VkResult res = VK_SUCCESS;
17654 switch(hAllocation->GetType())
17656 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17657 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
17659 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17661 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17662 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
17663 res = pBlock->BindBufferMemory(
this, hAllocation, allocationLocalOffset, hBuffer, pNext);
// Image counterpart of BindBufferMemory; identical dispatch on type.
17672 VkResult VmaAllocator_T::BindImageMemory(
17674 VkDeviceSize allocationLocalOffset,
17678 VkResult res = VK_SUCCESS;
17679 switch(hAllocation->GetType())
17681 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17682 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
17684 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17686 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
17687 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
17688 res = pBlock->BindImageMemory(
this, hAllocation, allocationLocalOffset, hImage, pNext);
// Flushes or invalidates one mapped range of an allocation. A no-op
// (VK_SUCCESS) when GetFlushOrInvalidateRange reports the memory type is
// host-coherent or the range is empty.
17697 VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
17699 VkDeviceSize offset, VkDeviceSize size,
17700 VMA_CACHE_OPERATION op)
17702 VkResult res = VK_SUCCESS;
17704 VkMappedMemoryRange memRange = {};
17705 if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
17709 case VMA_CACHE_FLUSH:
17710 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
17712 case VMA_CACHE_INVALIDATE:
17713 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Batched variant: collects the non-empty ranges of all allocations into a
// small vector, then issues a single vkFlush/vkInvalidate call.
17723 VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
17724 uint32_t allocationCount,
17726 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
17727 VMA_CACHE_OPERATION op)
17729 typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
17730 typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
17731 RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
17733 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
// Null offsets/sizes arrays mean "whole allocation" for every entry.
17736 const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
17737 const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
17738 VkMappedMemoryRange newRange;
17739 if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
17741 ranges.push_back(newRange);
17745 VkResult res = VK_SUCCESS;
17746 if(!ranges.empty())
17750 case VMA_CACHE_FLUSH:
17751 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
17753 case VMA_CACHE_INVALIDATE:
17754 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
// Releases a dedicated allocation: unlinks it from the per-memory-type
// dedicated list (under that type's write lock), then frees its
// VkDeviceMemory via FreeVulkanMemory.
17764 void VmaAllocator_T::FreeDedicatedMemory(
const VmaAllocation allocation)
17766 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
17768 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17770 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17771 DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
17772 dedicatedAllocations.Remove(allocation);
17775 VkDeviceMemory hMemory = allocation->GetMemory();
17787 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
17789 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Determines which memory types can hold the dummy buffer used by GPU
// defragmentation: creates a throwaway buffer, queries its memory
// requirements, destroys it, and returns the resulting memoryTypeBits
// (0 if the buffer could not be created).
17792 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const
17794 VkBufferCreateInfo dummyBufCreateInfo;
17795 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
17797 uint32_t memoryTypeBits = 0;
17800 VkBuffer buf = VK_NULL_HANDLE;
17801 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
17802 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
17803 if(res == VK_SUCCESS)
17806 VkMemoryRequirements memReq;
17807 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
17808 memoryTypeBits = memReq.memoryTypeBits;
17811 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
17814 return memoryTypeBits;
// Computes the global memory-type mask: all types, minus AMD device-coherent
// types unless the user opted in via m_UseAmdDeviceCoherentMemory.
17817 uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits()
const
17820 VMA_ASSERT(GetMemoryTypeCount() > 0);
17822 uint32_t memoryTypeBits = UINT32_MAX;
17824 if(!m_UseAmdDeviceCoherentMemory)
// Exclude VK_AMD_device_coherent_memory types by default.
17827 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17829 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
17831 memoryTypeBits &= ~(1u << memTypeIndex);
17836 return memoryTypeBits;
// Computes the VkMappedMemoryRange to flush/invalidate for an allocation.
// Returns false (in lines elided here) when no call is needed — size 0 or a
// host-coherent memory type. Offsets and sizes are aligned to
// nonCoherentAtomSize as required by the Vulkan spec, and clamped so the
// range never exceeds the allocation/block.
17839 bool VmaAllocator_T::GetFlushOrInvalidateRange(
17841 VkDeviceSize offset, VkDeviceSize size,
17842 VkMappedMemoryRange& outRange)
const
17844 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17845 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
17847 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
17848 const VkDeviceSize allocationSize = allocation->GetSize();
17849 VMA_ASSERT(offset <= allocationSize);
17851 outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
17852 outRange.pNext = VMA_NULL;
17853 outRange.memory = allocation->GetMemory();
17855 switch(allocation->GetType())
17857 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Dedicated: align offset down, then clamp the aligned size to the allocation.
17858 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
17859 if(size == VK_WHOLE_SIZE)
17861 outRange.size = allocationSize - outRange.offset;
17865 VMA_ASSERT(offset + size <= allocationSize);
17866 outRange.size = VMA_MIN(
17867 VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
17868 allocationSize - outRange.offset);
17871 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Block sub-allocation: compute relative range first, then rebase by the
// allocation's offset within the block and clamp to the block size.
17874 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
17875 if(size == VK_WHOLE_SIZE)
17877 size = allocationSize - offset;
17881 VMA_ASSERT(offset + size <= allocationSize);
17883 outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);
17886 const VkDeviceSize allocationOffset = allocation->GetOffset();
// Sub-allocations start on nonCoherentAtomSize boundaries by construction.
17887 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
17888 const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
17889 outRange.offset += allocationOffset;
17890 outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
17902 #if VMA_MEMORY_BUDGET
// Refreshes cached heap usage/budget from VK_EXT_memory_budget by chaining
// VkPhysicalDeviceMemoryBudgetPropertiesEXT onto
// vkGetPhysicalDeviceMemoryProperties2, then sanitizes the driver values and
// resets the staleness counter. Requires m_UseExtMemoryBudget.
17904 void VmaAllocator_T::UpdateVulkanBudget()
17906 VMA_ASSERT(m_UseExtMemoryBudget);
17908 VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };
17910 VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
17911 VmaPnextChainPushFront(&memProps, &budgetProps);
17913 GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
17916 VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
17918 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
17920 m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
17921 m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
// Snapshot our own block bytes so later deltas can correct the usage estimate.
17922 m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
// Some drivers report 0 or values above the heap size; clamp to sane bounds.
17925 if(m_Budget.m_VulkanBudget[heapIndex] == 0)
17927 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
17929 else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
17931 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
// Driver reported zero usage although we hold blocks: trust our own counter.
17933 if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
17935 m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
17938 m_Budget.m_OperationsSinceBudgetFetch = 0;
// Debug aid: fills the allocation's memory with a byte pattern (created /
// destroyed markers). Only acts when VMA_DEBUG_INITIALIZE_ALLOCATIONS is on,
// the allocation cannot become lost, and its memory type is HOST_VISIBLE.
// Maps, memsets, flushes (for non-coherent memory) and unmaps.
17944 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
17946 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
17947 !hAllocation->CanBecomeLost() &&
17948 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
17950 void* pData = VMA_NULL;
17951 VkResult res = Map(hAllocation, &pData);
17952 if(res == VK_SUCCESS)
17954 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
17955 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
17956 Unmap(hAllocation);
17960 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Lazily computes and caches the GPU-defragmentation memory-type mask.
// UINT32_MAX acts as the "not yet computed" sentinel. A benign race may
// compute the value twice; both stores write the same result.
17965 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
17967 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
17968 if(memoryTypeBits == UINT32_MAX)
17970 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
17971 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
17973 return memoryTypeBits;
17976 #if VMA_STATS_STRING_ENABLED
// Writes the detailed JSON map of the allocator's state into the writer:
// a "DedicatedAllocations" object (per memory type), a "DefaultPools" object
// (non-empty default block vectors) and a "Pools" object (custom pools keyed
// by pool ID). Section headers are emitted lazily on first non-empty entry.
17978 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
17980 bool dedicatedAllocationsStarted =
false;
17981 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17983 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17984 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
17985 if(!dedicatedAllocList.IsEmpty())
// Open the section object only once, on the first non-empty type.
17987 if(dedicatedAllocationsStarted ==
false)
17989 dedicatedAllocationsStarted =
true;
17990 json.WriteString(
"DedicatedAllocations");
17991 json.BeginObject();
17994 json.BeginString(
"Type ");
17995 json.ContinueString(memTypeIndex);
18001 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
18003 json.BeginObject(
true);
18004 alloc->PrintParameters(json);
18011 if(dedicatedAllocationsStarted)
// Default per-memory-type block vectors, skipping empty ones.
18017 bool allocationsStarted =
false;
18018 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
18020 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
18022 if(allocationsStarted ==
false)
18024 allocationsStarted =
true;
18025 json.WriteString(
"DefaultPools");
18026 json.BeginObject();
18029 json.BeginString(
"Type ");
18030 json.ContinueString(memTypeIndex);
18033 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
18036 if(allocationsStarted)
// Custom pools, keyed by numeric pool ID, under the shared pools lock.
18044 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
18045 if(!m_Pools.IsEmpty())
18047 json.WriteString(
"Pools");
18048 json.BeginObject();
18049 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
18051 json.BeginString();
18052 json.ContinueString(pool->GetId());
18055 pool->m_BlockVector.PrintDetailedMap(json);
// Public C API entry points (function signatures partially elided in this
// extract). Each validates arguments with VMA_ASSERT and forwards to the
// corresponding VmaAllocator_T member or field.
// vmaCreateAllocator interior: constructs the allocator and runs Init().
18071 VMA_ASSERT(pCreateInfo && pAllocator);
18074 VMA_DEBUG_LOG(
"vmaCreateAllocator");
18076 return (*pAllocator)->Init(pCreateInfo);
// vmaDestroyAllocator: copies the allocation callbacks out before deleting
// the allocator, since the allocator object itself owns them.
18082 if(allocator != VK_NULL_HANDLE)
18084 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
18085 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
18086 vma_delete(&allocationCallbacks, allocator);
// vmaGetAllocatorInfo: exposes the instance/physical device/device handles.
18092 VMA_ASSERT(allocator && pAllocatorInfo);
18093 pAllocatorInfo->
instance = allocator->m_hInstance;
18094 pAllocatorInfo->
physicalDevice = allocator->GetPhysicalDevice();
18095 pAllocatorInfo->
device = allocator->m_hDevice;
// vmaGetPhysicalDeviceProperties: returns a pointer to the cached properties.
18100 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
18102 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
18103 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties: returns a pointer to the cached memory properties.
18108 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
18110 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
18111 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties: property flags of one memory type, bounds-checked.
18116 uint32_t memoryTypeIndex,
18117 VkMemoryPropertyFlags* pFlags)
18119 VMA_ASSERT(allocator && pFlags);
18120 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
18121 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex: frame index must not equal the LOST sentinel.
18126 uint32_t frameIndex)
18128 VMA_ASSERT(allocator);
18129 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
18131 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18133 allocator->SetCurrentFrameIndex(frameIndex);
// vmaCalculateStats: forwards under the optional global debug mutex.
18140 VMA_ASSERT(allocator && pStats);
18141 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18142 allocator->CalculateStats(pStats);
// vmaGetBudget: fills budgets for all heaps starting at heap 0.
18149 VMA_ASSERT(allocator && pBudget);
18150 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18151 allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
18154 #if VMA_STATS_STRING_ENABLED
// Builds a heap-allocated, NUL-terminated JSON report of allocator state:
// overall "Total" stats, then one object per heap (size, flags, budget,
// stats, and nested per-memory-type objects with property flags and stats),
// plus the detailed map when detailedMap == VK_TRUE. The caller must release
// the string with vmaFreeStatsString.
18158 char** ppStatsString,
18159 VkBool32 detailedMap)
18161 VMA_ASSERT(allocator && ppStatsString);
18162 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18164 VmaStringBuilder sb(allocator);
18166 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
18167 json.BeginObject();
// Gather budgets and aggregate stats up front.
18170 allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());
18173 allocator->CalculateStats(&stats);
18175 json.WriteString(
"Total");
18176 VmaPrintStatInfo(json, stats.
total);
// One JSON object per memory heap.
18178 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
18180 json.BeginString(
"Heap ");
18181 json.ContinueString(heapIndex);
18183 json.BeginObject();
18185 json.WriteString(
"Size");
18186 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
18188 json.WriteString(
"Flags");
18189 json.BeginArray(
true);
18190 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
18192 json.WriteString(
"DEVICE_LOCAL");
// Budget sub-object for this heap.
18196 json.WriteString(
"Budget");
18197 json.BeginObject();
18199 json.WriteString(
"BlockBytes");
18200 json.WriteNumber(budget[heapIndex].blockBytes);
18201 json.WriteString(
"AllocationBytes");
18202 json.WriteNumber(budget[heapIndex].allocationBytes);
18203 json.WriteString(
"Usage");
18204 json.WriteNumber(budget[heapIndex].usage);
18205 json.WriteString(
"Budget");
18206 json.WriteNumber(budget[heapIndex].budget);
18212 json.WriteString(
"Stats");
18213 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Nested objects for each memory type belonging to this heap.
18216 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
18218 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
18220 json.BeginString(
"Type ");
18221 json.ContinueString(typeIndex);
18224 json.BeginObject();
18226 json.WriteString(
"Flags");
18227 json.BeginArray(
true);
18228 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
18229 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
18231 json.WriteString(
"DEVICE_LOCAL");
18233 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
18235 json.WriteString(
"HOST_VISIBLE");
18237 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
18239 json.WriteString(
"HOST_COHERENT");
18241 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
18243 json.WriteString(
"HOST_CACHED");
18245 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
18247 json.WriteString(
"LAZILY_ALLOCATED");
18249 #if VMA_VULKAN_VERSION >= 1001000
18250 if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
18252 json.WriteString(
"PROTECTED");
18255 #if VK_AMD_device_coherent_memory
18256 if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
18258 json.WriteString(
"DEVICE_COHERENT");
18260 if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
18262 json.WriteString(
"DEVICE_UNCACHED");
18269 json.WriteString(
"Stats");
18270 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional detailed map (pools, blocks, individual allocations).
18279 if(detailedMap == VK_TRUE)
18281 allocator->PrintDetailedMap(json);
// Copy the builder's buffer into a caller-owned NUL-terminated string.
18287 const size_t len = sb.GetLength();
18288 char*
const pChars = vma_new_array(allocator,
char, len + 1);
18291 memcpy(pChars, sb.GetData(), len);
18293 pChars[len] =
'\0';
18294 *ppStatsString = pChars;
// vmaFreeStatsString: releases a string produced by vmaBuildStatsString
// (length recomputed via strlen; +1 accounts for the NUL terminator).
18299 char* pStatsString)
18301 if(pStatsString != VMA_NULL)
18303 VMA_ASSERT(allocator);
18304 size_t len = strlen(pStatsString);
18305 vma_delete_array(allocator, pStatsString, len + 1);
18316 uint32_t memoryTypeBits,
18318 uint32_t* pMemoryTypeIndex)
18320 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18321 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18322 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18324 memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();
18331 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
18332 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
18333 uint32_t notPreferredFlags = 0;
18336 switch(pAllocationCreateInfo->
usage)
18341 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18343 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18347 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
18350 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18351 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18353 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18357 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18358 preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
18361 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18364 requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
18373 (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
18375 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
18378 *pMemoryTypeIndex = UINT32_MAX;
18379 uint32_t minCost = UINT32_MAX;
18380 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
18381 memTypeIndex < allocator->GetMemoryTypeCount();
18382 ++memTypeIndex, memTypeBit <<= 1)
18385 if((memTypeBit & memoryTypeBits) != 0)
18387 const VkMemoryPropertyFlags currFlags =
18388 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
18390 if((requiredFlags & ~currFlags) == 0)
18393 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
18394 VmaCountBitsSet(currFlags & notPreferredFlags);
18396 if(currCost < minCost)
18398 *pMemoryTypeIndex = memTypeIndex;
18403 minCost = currCost;
18408 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
18413 const VkBufferCreateInfo* pBufferCreateInfo,
18415 uint32_t* pMemoryTypeIndex)
18417 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18418 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
18419 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18420 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18422 const VkDevice hDev = allocator->m_hDevice;
18423 VkBuffer hBuffer = VK_NULL_HANDLE;
18424 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
18425 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
18426 if(res == VK_SUCCESS)
18428 VkMemoryRequirements memReq = {};
18429 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
18430 hDev, hBuffer, &memReq);
18434 memReq.memoryTypeBits,
18435 pAllocationCreateInfo,
18438 allocator->GetVulkanFunctions().vkDestroyBuffer(
18439 hDev, hBuffer, allocator->GetAllocationCallbacks());
18446 const VkImageCreateInfo* pImageCreateInfo,
18448 uint32_t* pMemoryTypeIndex)
18450 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18451 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
18452 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18453 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18455 const VkDevice hDev = allocator->m_hDevice;
18456 VkImage hImage = VK_NULL_HANDLE;
18457 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
18458 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
18459 if(res == VK_SUCCESS)
18461 VkMemoryRequirements memReq = {};
18462 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
18463 hDev, hImage, &memReq);
18467 memReq.memoryTypeBits,
18468 pAllocationCreateInfo,
18471 allocator->GetVulkanFunctions().vkDestroyImage(
18472 hDev, hImage, allocator->GetAllocationCallbacks());
18482 VMA_ASSERT(allocator && pCreateInfo && pPool);
18484 VMA_DEBUG_LOG(
"vmaCreatePool");
18486 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18488 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
18490 #if VMA_RECORDING_ENABLED
18491 if(allocator->GetRecorder() != VMA_NULL)
18493 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
18504 VMA_ASSERT(allocator);
18506 if(pool == VK_NULL_HANDLE)
18511 VMA_DEBUG_LOG(
"vmaDestroyPool");
18513 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18515 #if VMA_RECORDING_ENABLED
18516 if(allocator->GetRecorder() != VMA_NULL)
18518 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
18522 allocator->DestroyPool(pool);
18530 VMA_ASSERT(allocator && pool && pPoolStats);
18532 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18534 allocator->GetPoolStats(pool, pPoolStats);
18540 size_t* pLostAllocationCount)
18542 VMA_ASSERT(allocator && pool);
18544 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18546 #if VMA_RECORDING_ENABLED
18547 if(allocator->GetRecorder() != VMA_NULL)
18549 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
18553 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
18558 VMA_ASSERT(allocator && pool);
18560 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18562 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
18564 return allocator->CheckPoolCorruption(pool);
18570 const char** ppName)
18572 VMA_ASSERT(allocator && pool && ppName);
18574 VMA_DEBUG_LOG(
"vmaGetPoolName");
18576 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18578 *ppName = pool->GetName();
18586 VMA_ASSERT(allocator && pool);
18588 VMA_DEBUG_LOG(
"vmaSetPoolName");
18590 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18592 pool->SetName(pName);
18594 #if VMA_RECORDING_ENABLED
18595 if(allocator->GetRecorder() != VMA_NULL)
18597 allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
18604 const VkMemoryRequirements* pVkMemoryRequirements,
18609 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
18611 VMA_DEBUG_LOG(
"vmaAllocateMemory");
18613 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18615 VkResult result = allocator->AllocateMemory(
18616 *pVkMemoryRequirements,
18623 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18627 #if VMA_RECORDING_ENABLED
18628 if(allocator->GetRecorder() != VMA_NULL)
18630 allocator->GetRecorder()->RecordAllocateMemory(
18631 allocator->GetCurrentFrameIndex(),
18632 *pVkMemoryRequirements,
18638 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18640 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18648 const VkMemoryRequirements* pVkMemoryRequirements,
18650 size_t allocationCount,
18654 if(allocationCount == 0)
18659 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
18661 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
18663 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18665 VkResult result = allocator->AllocateMemory(
18666 *pVkMemoryRequirements,
18673 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18677 #if VMA_RECORDING_ENABLED
18678 if(allocator->GetRecorder() != VMA_NULL)
18680 allocator->GetRecorder()->RecordAllocateMemoryPages(
18681 allocator->GetCurrentFrameIndex(),
18682 *pVkMemoryRequirements,
18684 (uint64_t)allocationCount,
18689 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18691 for(
size_t i = 0; i < allocationCount; ++i)
18693 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
18707 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18709 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
18711 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18713 VkMemoryRequirements vkMemReq = {};
18714 bool requiresDedicatedAllocation =
false;
18715 bool prefersDedicatedAllocation =
false;
18716 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
18717 requiresDedicatedAllocation,
18718 prefersDedicatedAllocation);
18720 VkResult result = allocator->AllocateMemory(
18722 requiresDedicatedAllocation,
18723 prefersDedicatedAllocation,
18728 VMA_SUBALLOCATION_TYPE_BUFFER,
18732 #if VMA_RECORDING_ENABLED
18733 if(allocator->GetRecorder() != VMA_NULL)
18735 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
18736 allocator->GetCurrentFrameIndex(),
18738 requiresDedicatedAllocation,
18739 prefersDedicatedAllocation,
18745 if(pAllocationInfo && result == VK_SUCCESS)
18747 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18760 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18762 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
18764 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18766 VkMemoryRequirements vkMemReq = {};
18767 bool requiresDedicatedAllocation =
false;
18768 bool prefersDedicatedAllocation =
false;
18769 allocator->GetImageMemoryRequirements(image, vkMemReq,
18770 requiresDedicatedAllocation, prefersDedicatedAllocation);
18772 VkResult result = allocator->AllocateMemory(
18774 requiresDedicatedAllocation,
18775 prefersDedicatedAllocation,
18780 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
18784 #if VMA_RECORDING_ENABLED
18785 if(allocator->GetRecorder() != VMA_NULL)
18787 allocator->GetRecorder()->RecordAllocateMemoryForImage(
18788 allocator->GetCurrentFrameIndex(),
18790 requiresDedicatedAllocation,
18791 prefersDedicatedAllocation,
18797 if(pAllocationInfo && result == VK_SUCCESS)
18799 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18809 VMA_ASSERT(allocator);
18811 if(allocation == VK_NULL_HANDLE)
18816 VMA_DEBUG_LOG(
"vmaFreeMemory");
18818 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18820 #if VMA_RECORDING_ENABLED
18821 if(allocator->GetRecorder() != VMA_NULL)
18823 allocator->GetRecorder()->RecordFreeMemory(
18824 allocator->GetCurrentFrameIndex(),
18829 allocator->FreeMemory(
18836 size_t allocationCount,
18839 if(allocationCount == 0)
18844 VMA_ASSERT(allocator);
18846 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
18848 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18850 #if VMA_RECORDING_ENABLED
18851 if(allocator->GetRecorder() != VMA_NULL)
18853 allocator->GetRecorder()->RecordFreeMemoryPages(
18854 allocator->GetCurrentFrameIndex(),
18855 (uint64_t)allocationCount,
18860 allocator->FreeMemory(allocationCount, pAllocations);
18868 VMA_ASSERT(allocator && allocation && pAllocationInfo);
18870 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18872 #if VMA_RECORDING_ENABLED
18873 if(allocator->GetRecorder() != VMA_NULL)
18875 allocator->GetRecorder()->RecordGetAllocationInfo(
18876 allocator->GetCurrentFrameIndex(),
18881 allocator->GetAllocationInfo(allocation, pAllocationInfo);
18888 VMA_ASSERT(allocator && allocation);
18890 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18892 #if VMA_RECORDING_ENABLED
18893 if(allocator->GetRecorder() != VMA_NULL)
18895 allocator->GetRecorder()->RecordTouchAllocation(
18896 allocator->GetCurrentFrameIndex(),
18901 return allocator->TouchAllocation(allocation);
18909 VMA_ASSERT(allocator && allocation);
18911 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18913 allocation->SetUserData(allocator, pUserData);
18915 #if VMA_RECORDING_ENABLED
18916 if(allocator->GetRecorder() != VMA_NULL)
18918 allocator->GetRecorder()->RecordSetAllocationUserData(
18919 allocator->GetCurrentFrameIndex(),
18930 VMA_ASSERT(allocator && pAllocation);
18932 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
18934 allocator->CreateLostAllocation(pAllocation);
18936 #if VMA_RECORDING_ENABLED
18937 if(allocator->GetRecorder() != VMA_NULL)
18939 allocator->GetRecorder()->RecordCreateLostAllocation(
18940 allocator->GetCurrentFrameIndex(),
18951 VMA_ASSERT(allocator && allocation && ppData);
18953 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18955 VkResult res = allocator->Map(allocation, ppData);
18957 #if VMA_RECORDING_ENABLED
18958 if(allocator->GetRecorder() != VMA_NULL)
18960 allocator->GetRecorder()->RecordMapMemory(
18961 allocator->GetCurrentFrameIndex(),
18973 VMA_ASSERT(allocator && allocation);
18975 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18977 #if VMA_RECORDING_ENABLED
18978 if(allocator->GetRecorder() != VMA_NULL)
18980 allocator->GetRecorder()->RecordUnmapMemory(
18981 allocator->GetCurrentFrameIndex(),
18986 allocator->Unmap(allocation);
18991 VMA_ASSERT(allocator && allocation);
18993 VMA_DEBUG_LOG(
"vmaFlushAllocation");
18995 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18997 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
18999 #if VMA_RECORDING_ENABLED
19000 if(allocator->GetRecorder() != VMA_NULL)
19002 allocator->GetRecorder()->RecordFlushAllocation(
19003 allocator->GetCurrentFrameIndex(),
19004 allocation, offset, size);
19013 VMA_ASSERT(allocator && allocation);
19015 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
19017 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19019 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
19021 #if VMA_RECORDING_ENABLED
19022 if(allocator->GetRecorder() != VMA_NULL)
19024 allocator->GetRecorder()->RecordInvalidateAllocation(
19025 allocator->GetCurrentFrameIndex(),
19026 allocation, offset, size);
19035 uint32_t allocationCount,
19037 const VkDeviceSize* offsets,
19038 const VkDeviceSize* sizes)
19040 VMA_ASSERT(allocator);
19042 if(allocationCount == 0)
19047 VMA_ASSERT(allocations);
19049 VMA_DEBUG_LOG(
"vmaFlushAllocations");
19051 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19053 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);
19055 #if VMA_RECORDING_ENABLED
19056 if(allocator->GetRecorder() != VMA_NULL)
19067 uint32_t allocationCount,
19069 const VkDeviceSize* offsets,
19070 const VkDeviceSize* sizes)
19072 VMA_ASSERT(allocator);
19074 if(allocationCount == 0)
19079 VMA_ASSERT(allocations);
19081 VMA_DEBUG_LOG(
"vmaInvalidateAllocations");
19083 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19085 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);
19087 #if VMA_RECORDING_ENABLED
19088 if(allocator->GetRecorder() != VMA_NULL)
19099 VMA_ASSERT(allocator);
19101 VMA_DEBUG_LOG(
"vmaCheckCorruption");
19103 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19105 return allocator->CheckCorruption(memoryTypeBits);
19111 size_t allocationCount,
19112 VkBool32* pAllocationsChanged,
19122 if(pDefragmentationInfo != VMA_NULL)
19136 if(res == VK_NOT_READY)
19149 VMA_ASSERT(allocator && pInfo && pContext);
19160 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
19162 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
19164 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19166 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
19168 #if VMA_RECORDING_ENABLED
19169 if(allocator->GetRecorder() != VMA_NULL)
19171 allocator->GetRecorder()->RecordDefragmentationBegin(
19172 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
19183 VMA_ASSERT(allocator);
19185 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
19187 if(context != VK_NULL_HANDLE)
19189 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19191 #if VMA_RECORDING_ENABLED
19192 if(allocator->GetRecorder() != VMA_NULL)
19194 allocator->GetRecorder()->RecordDefragmentationEnd(
19195 allocator->GetCurrentFrameIndex(), context);
19199 return allocator->DefragmentationEnd(context);
19213 VMA_ASSERT(allocator);
19216 VMA_DEBUG_LOG(
"vmaBeginDefragmentationPass");
19218 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19220 if(context == VK_NULL_HANDLE)
19226 return allocator->DefragmentationPassBegin(pInfo, context);
19232 VMA_ASSERT(allocator);
19234 VMA_DEBUG_LOG(
"vmaEndDefragmentationPass");
19235 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19237 if(context == VK_NULL_HANDLE)
19240 return allocator->DefragmentationPassEnd(context);
19248 VMA_ASSERT(allocator && allocation && buffer);
19250 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
19252 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19254 return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
19260 VkDeviceSize allocationLocalOffset,
19264 VMA_ASSERT(allocator && allocation && buffer);
19266 VMA_DEBUG_LOG(
"vmaBindBufferMemory2");
19268 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19270 return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
19278 VMA_ASSERT(allocator && allocation && image);
19280 VMA_DEBUG_LOG(
"vmaBindImageMemory");
19282 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19284 return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
19290 VkDeviceSize allocationLocalOffset,
19294 VMA_ASSERT(allocator && allocation && image);
19296 VMA_DEBUG_LOG(
"vmaBindImageMemory2");
19298 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19300 return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
19305 const VkBufferCreateInfo* pBufferCreateInfo,
19311 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
19313 if(pBufferCreateInfo->size == 0)
19315 return VK_ERROR_VALIDATION_FAILED_EXT;
19317 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
19318 !allocator->m_UseKhrBufferDeviceAddress)
19320 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
19321 return VK_ERROR_VALIDATION_FAILED_EXT;
19324 VMA_DEBUG_LOG(
"vmaCreateBuffer");
19326 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19328 *pBuffer = VK_NULL_HANDLE;
19329 *pAllocation = VK_NULL_HANDLE;
19332 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
19333 allocator->m_hDevice,
19335 allocator->GetAllocationCallbacks(),
19340 VkMemoryRequirements vkMemReq = {};
19341 bool requiresDedicatedAllocation =
false;
19342 bool prefersDedicatedAllocation =
false;
19343 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
19344 requiresDedicatedAllocation, prefersDedicatedAllocation);
19347 res = allocator->AllocateMemory(
19349 requiresDedicatedAllocation,
19350 prefersDedicatedAllocation,
19352 pBufferCreateInfo->usage,
19354 *pAllocationCreateInfo,
19355 VMA_SUBALLOCATION_TYPE_BUFFER,
19359 #if VMA_RECORDING_ENABLED
19360 if(allocator->GetRecorder() != VMA_NULL)
19362 allocator->GetRecorder()->RecordCreateBuffer(
19363 allocator->GetCurrentFrameIndex(),
19364 *pBufferCreateInfo,
19365 *pAllocationCreateInfo,
19375 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
19380 #if VMA_STATS_STRING_ENABLED
19381 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
19383 if(pAllocationInfo != VMA_NULL)
19385 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19390 allocator->FreeMemory(
19393 *pAllocation = VK_NULL_HANDLE;
19394 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19395 *pBuffer = VK_NULL_HANDLE;
19398 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19399 *pBuffer = VK_NULL_HANDLE;
19410 VMA_ASSERT(allocator);
19412 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19417 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
19419 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19421 #if VMA_RECORDING_ENABLED
19422 if(allocator->GetRecorder() != VMA_NULL)
19424 allocator->GetRecorder()->RecordDestroyBuffer(
19425 allocator->GetCurrentFrameIndex(),
19430 if(buffer != VK_NULL_HANDLE)
19432 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
19435 if(allocation != VK_NULL_HANDLE)
19437 allocator->FreeMemory(
19445 const VkImageCreateInfo* pImageCreateInfo,
19451 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
19453 if(pImageCreateInfo->extent.width == 0 ||
19454 pImageCreateInfo->extent.height == 0 ||
19455 pImageCreateInfo->extent.depth == 0 ||
19456 pImageCreateInfo->mipLevels == 0 ||
19457 pImageCreateInfo->arrayLayers == 0)
19459 return VK_ERROR_VALIDATION_FAILED_EXT;
19462 VMA_DEBUG_LOG(
"vmaCreateImage");
19464 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19466 *pImage = VK_NULL_HANDLE;
19467 *pAllocation = VK_NULL_HANDLE;
19470 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
19471 allocator->m_hDevice,
19473 allocator->GetAllocationCallbacks(),
19477 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
19478 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
19479 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
19482 VkMemoryRequirements vkMemReq = {};
19483 bool requiresDedicatedAllocation =
false;
19484 bool prefersDedicatedAllocation =
false;
19485 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
19486 requiresDedicatedAllocation, prefersDedicatedAllocation);
19488 res = allocator->AllocateMemory(
19490 requiresDedicatedAllocation,
19491 prefersDedicatedAllocation,
19495 *pAllocationCreateInfo,
19500 #if VMA_RECORDING_ENABLED
19501 if(allocator->GetRecorder() != VMA_NULL)
19503 allocator->GetRecorder()->RecordCreateImage(
19504 allocator->GetCurrentFrameIndex(),
19506 *pAllocationCreateInfo,
19516 res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
19521 #if VMA_STATS_STRING_ENABLED
19522 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
19524 if(pAllocationInfo != VMA_NULL)
19526 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19531 allocator->FreeMemory(
19534 *pAllocation = VK_NULL_HANDLE;
19535 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19536 *pImage = VK_NULL_HANDLE;
19539 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19540 *pImage = VK_NULL_HANDLE;
19551 VMA_ASSERT(allocator);
19553 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19558 VMA_DEBUG_LOG(
"vmaDestroyImage");
19560 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19562 #if VMA_RECORDING_ENABLED
19563 if(allocator->GetRecorder() != VMA_NULL)
19565 allocator->GetRecorder()->RecordDestroyImage(
19566 allocator->GetCurrentFrameIndex(),
19571 if(image != VK_NULL_HANDLE)
19573 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
19575 if(allocation != VK_NULL_HANDLE)
19577 allocator->FreeMemory(
Definition: vk_mem_alloc.h:2879
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2905
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2911
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2897
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2918
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2892
float priority
A floating-point value between 0 and 1, indicating the priority of the allocation relative to other m...
Definition: vk_mem_alloc.h:2925
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2887
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2881
Represents single memory allocation.
Parameters of VmaAllocation objects that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:3229
VkDeviceSize offset
Offset in VkDeviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:3253
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:3273
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:3234
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:3264
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:3278
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:3243
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:2413
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:2418
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2444
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:2469
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:2415
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null.
Definition: vk_mem_alloc.h:2475
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:2427
VkInstance instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:2487
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:2424
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:2482
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:2421
uint32_t vulkanApiVersion
Optional. The highest version of Vulkan that the application is designed to use.
Definition: vk_mem_alloc.h:2496
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:2430
Represents the main, initialized object of this library.
Information about existing VmaAllocator object.
Definition: vk_mem_alloc.h:2511
VkDevice device
Handle to Vulkan device object.
Definition: vk_mem_alloc.h:2526
VkInstance instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:2516
VkPhysicalDevice physicalDevice
Handle to Vulkan physical device object.
Definition: vk_mem_alloc.h:2521
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
Definition: vk_mem_alloc.h:2617
VkDeviceSize blockBytes
Sum size of all VkDeviceMemory blocks allocated from particular heap, in bytes.
Definition: vk_mem_alloc.h:2620
VkDeviceSize allocationBytes
Sum size of all allocations created in particular heap, in bytes.
Definition: vk_mem_alloc.h:2631
VkDeviceSize usage
Estimated current memory usage of the program, in bytes.
Definition: vk_mem_alloc.h:2641
VkDeviceSize budget
Estimated amount of memory available to the program, in bytes.
Definition: vk_mem_alloc.h:2652
An opaque object that represents a started defragmentation process.
Parameters for defragmentation.
Definition: vk_mem_alloc.h:3628
const VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:3668
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:3634
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:3688
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:3683
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:3631
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:3649
uint32_t poolCount
Number of pools in pPools array.
Definition: vk_mem_alloc.h:3652
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:3697
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:3678
const VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:3643
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:3673
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:3719
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:3729
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:3724
Parameters for incremental defragmentation steps.
Definition: vk_mem_alloc.h:3710
uint32_t moveCount
Definition: vk_mem_alloc.h:3711
VmaDefragmentationPassMoveInfo * pMoves
Definition: vk_mem_alloc.h:3712
Definition: vk_mem_alloc.h:3700
VkDeviceMemory memory
Definition: vk_mem_alloc.h:3702
VkDeviceSize offset
Definition: vk_mem_alloc.h:3703
VmaAllocation allocation
Definition: vk_mem_alloc.h:3701
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:3733
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:3741
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:3735
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:3737
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:3739
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:2222
void * pUserData
Optional, can be null.
Definition: vk_mem_alloc.h:2228
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:2224
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:2226
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:3047
float priority
A floating-point value between 0 and 1, indicating the priority of the allocations in this pool relat...
Definition: vk_mem_alloc.h:3095
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:3050
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:3053
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:3089
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:3062
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:3067
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:3075
Represents custom memory pool.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:3100
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:3103
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:3122
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:3119
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:3109
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:3106
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:3112
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:2398
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:2408
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:2400
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:2578
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:2589
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:2589
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:2588
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:2590
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:2582
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:2590
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:2586
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:2580
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:2589
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:2584
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:2590
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2595
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:2597
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:2596
VmaStatInfo total
Definition: vk_mem_alloc.h:2598
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:2352
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:2362
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:2367
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:2355
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:2359
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:2364
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:2356
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:2363
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:2360
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:2354
PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:2353
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:2366
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:2368
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:2361
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:2357
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:2358
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:2369
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:2365
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes the parameters of a created VmaPool.
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size, void *pUserData)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:2208
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
struct VmaAllocatorInfo VmaAllocatorInfo
Information about existing VmaAllocator object.
VkResult vmaEndDefragmentationPass(VmaAllocator allocator, VmaDefragmentationContext context)
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:2029
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
struct VmaStats VmaStats
General statistics from current state of Allocator.
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:3043
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
struct VmaPoolStats VmaPoolStats
Describes the parameters of an existing VmaPool.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:2384
@ VMA_RECORD_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2392
@ VMA_RECORD_FLUSH_AFTER_CALL_BIT
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:2390
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:2232
@ VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT
Definition: vk_mem_alloc.h:2307
@ VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:2237
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT
Definition: vk_mem_alloc.h:2289
@ VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT
Definition: vk_mem_alloc.h:2325
@ VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT
Definition: vk_mem_alloc.h:2277
@ VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:2262
@ VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2344
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT
Definition: vk_mem_alloc.h:2342
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2876
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
void vmaFreeMemory(VmaAllocator allocator, const VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:3618
@ VMA_DEFRAGMENTATION_FLAG_INCREMENTAL
Definition: vk_mem_alloc.h:3619
@ VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:3620
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
struct VmaDefragmentationPassInfo VmaDefragmentationPassInfo
Parameters for incremental defragmentation steps.
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size, void *pUserData)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:2201
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, const VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:3622
VkResult vmaBindBufferMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkBuffer buffer, const void *pNext)
Binds buffer to allocation with additional parameters.
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2987
@ VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:3022
@ VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:3041
@ VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:3033
@ VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:3005
@ VMA_POOL_CREATE_ALGORITHM_MASK
Definition: vk_mem_alloc.h:3037
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkResult vmaDefragment(VmaAllocator allocator, const VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
struct VmaBudget VmaBudget
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
VmaMemoryUsage
Definition: vk_mem_alloc.h:2700
@ VMA_MEMORY_USAGE_MAX_ENUM
Definition: vk_mem_alloc.h:2763
@ VMA_MEMORY_USAGE_CPU_ONLY
Definition: vk_mem_alloc.h:2731
@ VMA_MEMORY_USAGE_CPU_COPY
Definition: vk_mem_alloc.h:2753
@ VMA_MEMORY_USAGE_GPU_TO_CPU
Definition: vk_mem_alloc.h:2747
@ VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED
Definition: vk_mem_alloc.h:2761
@ VMA_MEMORY_USAGE_CPU_TO_GPU
Definition: vk_mem_alloc.h:2738
@ VMA_MEMORY_USAGE_GPU_ONLY
Definition: vk_mem_alloc.h:2721
@ VMA_MEMORY_USAGE_UNKNOWN
Definition: vk_mem_alloc.h:2704
VkResult vmaBindImageMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkImage image, const void *pNext)
Binds image to allocation with additional parameters.
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VkResult vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
VkResult vmaInvalidateAllocations(VmaAllocator allocator, uint32_t allocationCount, const VmaAllocation *allocations, const VkDeviceSize *offsets, const VkDeviceSize *sizes)
Invalidates memory of given set of allocations.
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
VkResult vmaBeginDefragmentationPass(VmaAllocator allocator, VmaDefragmentationContext context, VmaDefragmentationPassInfo *pInfo)
VkResult vmaFlushAllocations(VmaAllocator allocator, uint32_t allocationCount, const VmaAllocation *allocations, const VkDeviceSize *offsets, const VkDeviceSize *sizes)
Flushes memory of given set of allocations.
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:2346
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
struct VmaDefragmentationPassMoveInfo VmaDefragmentationPassMoveInfo
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2767
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT
Definition: vk_mem_alloc.h:2862
@ VMA_ALLOCATION_CREATE_MAPPED_BIT
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2798
@ VMA_ALLOCATION_CREATE_DONT_BIND_BIT
Definition: vk_mem_alloc.h:2835
@ VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT
Definition: vk_mem_alloc.h:2855
@ VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2774
@ VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT
Definition: vk_mem_alloc.h:2829
@ VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT
Definition: vk_mem_alloc.h:2811
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT
Definition: vk_mem_alloc.h:2865
@ VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT
Definition: vk_mem_alloc.h:2818
@ VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT
Definition: vk_mem_alloc.h:2844
@ VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2785
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT
Definition: vk_mem_alloc.h:2859
@ VMA_ALLOCATION_CREATE_STRATEGY_MASK
Definition: vk_mem_alloc.h:2869
@ VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT
Definition: vk_mem_alloc.h:2824
@ VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT
Definition: vk_mem_alloc.h:2839
@ VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT
Definition: vk_mem_alloc.h:2848
@ VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2874
void vmaSetPoolName(VmaAllocator allocator, VmaPool pool, const char *pName)
Sets name of a custom pool.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
void vmaGetBudget(VmaAllocator allocator, VmaBudget *pBudget)
Retrieves information about current memory budget for all memory heaps.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
void vmaGetPoolName(VmaAllocator allocator, VmaPool pool, const char **ppName)
Retrieves name of a custom pool.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:2394
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
void vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo *pAllocatorInfo)
Returns information about existing VmaAllocator object - handle to Vulkan device etc.