23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
2011 #ifndef VMA_RECORDING_ENABLED
2012 #define VMA_RECORDING_ENABLED 0
2015 #if !defined(NOMINMAX) && defined(VMA_IMPLEMENTATION)
2016 #define NOMINMAX // For windows.h
2019 #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
2020 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
2021 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
2022 extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
2023 extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
2024 extern PFN_vkAllocateMemory vkAllocateMemory;
2025 extern PFN_vkFreeMemory vkFreeMemory;
2026 extern PFN_vkMapMemory vkMapMemory;
2027 extern PFN_vkUnmapMemory vkUnmapMemory;
2028 extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
2029 extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
2030 extern PFN_vkBindBufferMemory vkBindBufferMemory;
2031 extern PFN_vkBindImageMemory vkBindImageMemory;
2032 extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
2033 extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
2034 extern PFN_vkCreateBuffer vkCreateBuffer;
2035 extern PFN_vkDestroyBuffer vkDestroyBuffer;
2036 extern PFN_vkCreateImage vkCreateImage;
2037 extern PFN_vkDestroyImage vkDestroyImage;
2038 extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
2039 #if VMA_VULKAN_VERSION >= 1001000
2040 extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
2041 extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
2042 extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
2043 extern PFN_vkBindImageMemory2 vkBindImageMemory2;
2044 extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
2045 #endif // #if VMA_VULKAN_VERSION >= 1001000
2046 #endif // #if defined(__ANDROID__) && VMA_STATIC_VULKAN_FUNCTIONS && VK_NO_PROTOTYPES
2049 #include <vulkan/vulkan.h>
2055 #if !defined(VMA_VULKAN_VERSION)
2056 #if defined(VK_VERSION_1_2)
2057 #define VMA_VULKAN_VERSION 1002000
2058 #elif defined(VK_VERSION_1_1)
2059 #define VMA_VULKAN_VERSION 1001000
2061 #define VMA_VULKAN_VERSION 1000000
2065 #if !defined(VMA_DEDICATED_ALLOCATION)
2066 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
2067 #define VMA_DEDICATED_ALLOCATION 1
2069 #define VMA_DEDICATED_ALLOCATION 0
2073 #if !defined(VMA_BIND_MEMORY2)
2074 #if VK_KHR_bind_memory2
2075 #define VMA_BIND_MEMORY2 1
2077 #define VMA_BIND_MEMORY2 0
2081 #if !defined(VMA_MEMORY_BUDGET)
2082 #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
2083 #define VMA_MEMORY_BUDGET 1
2085 #define VMA_MEMORY_BUDGET 0
2090 #if !defined(VMA_BUFFER_DEVICE_ADDRESS)
2091 #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
2092 #define VMA_BUFFER_DEVICE_ADDRESS 1
2094 #define VMA_BUFFER_DEVICE_ADDRESS 0
2103 #ifndef VMA_CALL_PRE
2104 #define VMA_CALL_PRE
2106 #ifndef VMA_CALL_POST
2107 #define VMA_CALL_POST
2121 #ifndef VMA_LEN_IF_NOT_NULL
2122 #define VMA_LEN_IF_NOT_NULL(len)
2127 #ifndef VMA_NULLABLE
2129 #define VMA_NULLABLE _Nullable
2131 #define VMA_NULLABLE
2137 #ifndef VMA_NOT_NULL
2139 #define VMA_NOT_NULL _Nonnull
2141 #define VMA_NOT_NULL
2147 #ifndef VMA_NOT_NULL_NON_DISPATCHABLE
2148 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2149 #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
2151 #define VMA_NOT_NULL_NON_DISPATCHABLE
2155 #ifndef VMA_NULLABLE_NON_DISPATCHABLE
2156 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2157 #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
2159 #define VMA_NULLABLE_NON_DISPATCHABLE
2177 uint32_t memoryType,
2178 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2180 void* VMA_NULLABLE pUserData);
2184 uint32_t memoryType,
2185 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2187 void* VMA_NULLABLE pUserData);
2327 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
2328 PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
2329 PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
2331 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
2332 PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
2333 PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
2335 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
2336 PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
2426 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(
"VkPhysicalDeviceMemoryProperties::memoryHeapCount")
pHeapSizeLimit;
2499 const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);
2507 const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);
2517 uint32_t memoryTypeIndex,
2518 VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);
2530 uint32_t frameIndex);
2626 #ifndef VMA_STATS_STRING_ENABLED
2627 #define VMA_STATS_STRING_ENABLED 1
2630 #if VMA_STATS_STRING_ENABLED
2637 char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
2638 VkBool32 detailedMap);
2642 char* VMA_NULLABLE pStatsString);
2644 #endif // #if VMA_STATS_STRING_ENABLED
2896 uint32_t memoryTypeBits,
2898 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2914 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
2916 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2932 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
2934 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
3078 VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);
3106 size_t* VMA_NULLABLE pLostAllocationCount);
3133 const char* VMA_NULLABLE * VMA_NOT_NULL ppName);
3143 const char* VMA_NULLABLE pName);
3232 const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
3258 const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
3260 size_t allocationCount,
3261 VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3262 VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);
3272 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3280 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3305 size_t allocationCount,
3306 const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);
3318 VkDeviceSize newSize);
3375 void* VMA_NULLABLE pUserData);
3432 void* VMA_NULLABLE * VMA_NOT_NULL ppData);
3470 VkDeviceSize offset,
3497 VkDeviceSize offset,
3516 uint32_t allocationCount,
3517 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3518 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3519 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3537 uint32_t allocationCount,
3538 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3539 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3540 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3619 const VmaPool VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(poolCount)
pPools;
3653 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE
memory;
3791 const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3792 size_t allocationCount,
3793 VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
3812 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);
3827 VkDeviceSize allocationLocalOffset,
3828 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3829 const void* VMA_NULLABLE pNext);
3846 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);
3861 VkDeviceSize allocationLocalOffset,
3862 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3863 const void* VMA_NULLABLE pNext);
3893 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
3895 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
3912 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
3918 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
3920 VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
3937 VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
3944 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
3947 #if defined(__cplusplus) && defined(__INTELLISENSE__)
3948 #define VMA_IMPLEMENTATION
3951 #ifdef VMA_IMPLEMENTATION
3952 #undef VMA_IMPLEMENTATION
3959 #if VMA_RECORDING_ENABLED
3962 #include <windows.h>
3982 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
3983 #define VMA_STATIC_VULKAN_FUNCTIONS 1
3992 #if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
3993 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
3994 #if defined(VK_NO_PROTOTYPES)
3995 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
3996 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
4009 #if VMA_USE_STL_CONTAINERS
4010 #define VMA_USE_STL_VECTOR 1
4011 #define VMA_USE_STL_UNORDERED_MAP 1
4012 #define VMA_USE_STL_LIST 1
4015 #ifndef VMA_USE_STL_SHARED_MUTEX
4017 #if __cplusplus >= 201703L
4018 #define VMA_USE_STL_SHARED_MUTEX 1
4022 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
4023 #define VMA_USE_STL_SHARED_MUTEX 1
4025 #define VMA_USE_STL_SHARED_MUTEX 0
4033 #if VMA_USE_STL_VECTOR
4037 #if VMA_USE_STL_UNORDERED_MAP
4038 #include <unordered_map>
4041 #if VMA_USE_STL_LIST
4050 #include <algorithm>
4055 #define VMA_NULL nullptr
4058 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
4060 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4063 if(alignment <
sizeof(
void*))
4065 alignment =
sizeof(
void*);
4068 return memalign(alignment, size);
4070 #elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
4073 #if defined(__APPLE__)
4074 #include <AvailabilityMacros.h>
4077 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4079 #if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0))
4080 #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0
4087 if (__builtin_available(macOS 10.15, iOS 13, *))
4088 return aligned_alloc(alignment, size);
4092 if(alignment <
sizeof(
void*))
4094 alignment =
sizeof(
void*);
4098 if(posix_memalign(&pointer, alignment, size) == 0)
4102 #elif defined(_WIN32)
4103 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4105 return _aligned_malloc(size, alignment);
4108 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4110 return aligned_alloc(alignment, size);
4115 static void vma_aligned_free(
void* ptr)
4120 static void vma_aligned_free(
void* ptr)
4134 #define VMA_ASSERT(expr)
4136 #define VMA_ASSERT(expr) assert(expr)
4142 #ifndef VMA_HEAVY_ASSERT
4144 #define VMA_HEAVY_ASSERT(expr)
4146 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
4150 #ifndef VMA_ALIGN_OF
4151 #define VMA_ALIGN_OF(type) (__alignof(type))
4154 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
4155 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
4158 #ifndef VMA_SYSTEM_ALIGNED_FREE
4160 #if defined(VMA_SYSTEM_FREE)
4161 #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr)
4163 #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr)
4168 #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
4172 #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
4176 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
4180 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
4183 #ifndef VMA_DEBUG_LOG
4184 #define VMA_DEBUG_LOG(format, ...)
4194 #if VMA_STATS_STRING_ENABLED
4195 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
4197 snprintf(outStr, strLen,
"%u",
static_cast<unsigned int>(num));
4199 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
4201 snprintf(outStr, strLen,
"%llu",
static_cast<unsigned long long>(num));
4203 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
4205 snprintf(outStr, strLen,
"%p", ptr);
4213 void Lock() { m_Mutex.lock(); }
4214 void Unlock() { m_Mutex.unlock(); }
4215 bool TryLock() {
return m_Mutex.try_lock(); }
4219 #define VMA_MUTEX VmaMutex
4223 #ifndef VMA_RW_MUTEX
4224 #if VMA_USE_STL_SHARED_MUTEX
4226 #include <shared_mutex>
4230 void LockRead() { m_Mutex.lock_shared(); }
4231 void UnlockRead() { m_Mutex.unlock_shared(); }
4232 bool TryLockRead() {
return m_Mutex.try_lock_shared(); }
4233 void LockWrite() { m_Mutex.lock(); }
4234 void UnlockWrite() { m_Mutex.unlock(); }
4235 bool TryLockWrite() {
return m_Mutex.try_lock(); }
4237 std::shared_mutex m_Mutex;
4239 #define VMA_RW_MUTEX VmaRWMutex
4240 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
4246 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
4247 void LockRead() { AcquireSRWLockShared(&m_Lock); }
4248 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
4249 bool TryLockRead() {
return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
4250 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
4251 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
4252 bool TryLockWrite() {
return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
4256 #define VMA_RW_MUTEX VmaRWMutex
4262 void LockRead() { m_Mutex.Lock(); }
4263 void UnlockRead() { m_Mutex.Unlock(); }
4264 bool TryLockRead() {
return m_Mutex.TryLock(); }
4265 void LockWrite() { m_Mutex.Lock(); }
4266 void UnlockWrite() { m_Mutex.Unlock(); }
4267 bool TryLockWrite() {
return m_Mutex.TryLock(); }
4271 #define VMA_RW_MUTEX VmaRWMutex
4272 #endif // #if VMA_USE_STL_SHARED_MUTEX
4273 #endif // #ifndef VMA_RW_MUTEX
4278 #ifndef VMA_ATOMIC_UINT32
4280 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
4283 #ifndef VMA_ATOMIC_UINT64
4285 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
4288 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
4293 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
4296 #ifndef VMA_DEBUG_ALIGNMENT
4301 #define VMA_DEBUG_ALIGNMENT (1)
4304 #ifndef VMA_DEBUG_MARGIN
4309 #define VMA_DEBUG_MARGIN (0)
4312 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
4317 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
4320 #ifndef VMA_DEBUG_DETECT_CORRUPTION
4326 #define VMA_DEBUG_DETECT_CORRUPTION (0)
4329 #ifndef VMA_DEBUG_GLOBAL_MUTEX
4334 #define VMA_DEBUG_GLOBAL_MUTEX (0)
4337 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
4342 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
4345 #ifndef VMA_SMALL_HEAP_MAX_SIZE
4346 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
4350 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
4351 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
4355 #ifndef VMA_CLASS_NO_COPY
4356 #define VMA_CLASS_NO_COPY(className) \
4358 className(const className&) = delete; \
4359 className& operator=(const className&) = delete;
4362 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
4365 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
4367 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
4368 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
4376 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
4377 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
4378 static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
4380 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
4382 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
4383 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
4386 static inline uint32_t VmaCountBitsSet(uint32_t v)
4388 uint32_t c = v - ((v >> 1) & 0x55555555);
4389 c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
4390 c = ((c >> 4) + c) & 0x0F0F0F0F;
4391 c = ((c >> 8) + c) & 0x00FF00FF;
4392 c = ((c >> 16) + c) & 0x0000FFFF;
4401 template <
typename T>
4402 inline bool VmaIsPow2(T x)
4404 return (x & (x-1)) == 0;
4409 template <
typename T>
4410 static inline T VmaAlignUp(T val, T alignment)
4412 VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
4413 return (val + alignment - 1) & ~(alignment - 1);
4417 template <
typename T>
4418 static inline T VmaAlignDown(T val, T alignment)
4420 VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
4421 return val & ~(alignment - 1);
4425 template <
typename T>
4426 static inline T VmaRoundDiv(T x, T y)
4428 return (x + (y / (T)2)) / y;
4432 static inline uint32_t VmaNextPow2(uint32_t v)
4443 static inline uint64_t VmaNextPow2(uint64_t v)
4457 static inline uint32_t VmaPrevPow2(uint32_t v)
4467 static inline uint64_t VmaPrevPow2(uint64_t v)
4479 static inline bool VmaStrIsEmpty(
const char* pStr)
4481 return pStr == VMA_NULL || *pStr ==
'\0';
4484 #if VMA_STATS_STRING_ENABLED
4486 static const char* VmaAlgorithmToStr(uint32_t algorithm)
4502 #endif // #if VMA_STATS_STRING_ENABLED
4506 template<
typename Iterator,
typename Compare>
4507 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
4509 Iterator centerValue = end; --centerValue;
4510 Iterator insertIndex = beg;
4511 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
4513 if(cmp(*memTypeIndex, *centerValue))
4515 if(insertIndex != memTypeIndex)
4517 VMA_SWAP(*memTypeIndex, *insertIndex);
4522 if(insertIndex != centerValue)
4524 VMA_SWAP(*insertIndex, *centerValue);
4529 template<
typename Iterator,
typename Compare>
4530 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
4534 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
4535 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
4536 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
4540 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
4542 #endif // #ifndef VMA_SORT
4551 static inline bool VmaBlocksOnSamePage(
4552 VkDeviceSize resourceAOffset,
4553 VkDeviceSize resourceASize,
4554 VkDeviceSize resourceBOffset,
4555 VkDeviceSize pageSize)
4557 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
4558 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
4559 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
4560 VkDeviceSize resourceBStart = resourceBOffset;
4561 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
4562 return resourceAEndPage == resourceBStartPage;
4565 enum VmaSuballocationType
4567 VMA_SUBALLOCATION_TYPE_FREE = 0,
4568 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
4569 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
4570 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
4571 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
4572 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
4573 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
4582 static inline bool VmaIsBufferImageGranularityConflict(
4583 VmaSuballocationType suballocType1,
4584 VmaSuballocationType suballocType2)
4586 if(suballocType1 > suballocType2)
4588 VMA_SWAP(suballocType1, suballocType2);
4591 switch(suballocType1)
4593 case VMA_SUBALLOCATION_TYPE_FREE:
4595 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
4597 case VMA_SUBALLOCATION_TYPE_BUFFER:
4599 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4600 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4601 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
4603 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4604 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
4605 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4606 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
4608 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4609 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
4617 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
4619 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4620 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
4621 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4622 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
4624 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
4631 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
4633 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4634 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
4635 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4636 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
4638 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
4651 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
4653 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
4654 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4655 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4656 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
4662 VMA_CLASS_NO_COPY(VmaMutexLock)
4664 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
4665 m_pMutex(useMutex ? &mutex : VMA_NULL)
4666 {
if(m_pMutex) { m_pMutex->Lock(); } }
4668 {
if(m_pMutex) { m_pMutex->Unlock(); } }
4670 VMA_MUTEX* m_pMutex;
4674 struct VmaMutexLockRead
4676 VMA_CLASS_NO_COPY(VmaMutexLockRead)
4678 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
4679 m_pMutex(useMutex ? &mutex : VMA_NULL)
4680 {
if(m_pMutex) { m_pMutex->LockRead(); } }
4681 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
4683 VMA_RW_MUTEX* m_pMutex;
4687 struct VmaMutexLockWrite
4689 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
4691 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
4692 m_pMutex(useMutex ? &mutex : VMA_NULL)
4693 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
4694 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
4696 VMA_RW_MUTEX* m_pMutex;
4699 #if VMA_DEBUG_GLOBAL_MUTEX
4700 static VMA_MUTEX gDebugGlobalMutex;
4701 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
4703 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
4707 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
4718 template <
typename CmpLess,
typename IterT,
typename KeyT>
4719 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key,
const CmpLess& cmp)
4721 size_t down = 0, up = (end - beg);
4724 const size_t mid = (down + up) / 2;
4725 if(cmp(*(beg+mid), key))
4737 template<
typename CmpLess,
typename IterT,
typename KeyT>
4738 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
4740 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4741 beg, end, value, cmp);
4743 (!cmp(*it, value) && !cmp(value, *it)))
4755 template<
typename T>
4756 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
4758 for(uint32_t i = 0; i < count; ++i)
4760 const T iPtr = arr[i];
4761 if(iPtr == VMA_NULL)
4765 for(uint32_t j = i + 1; j < count; ++j)
4776 template<
typename MainT,
typename NewT>
4777 static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
4779 newStruct->pNext = mainStruct->pNext;
4780 mainStruct->pNext = newStruct;
4786 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
4788 void* result = VMA_NULL;
4789 if((pAllocationCallbacks != VMA_NULL) &&
4790 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
4792 result = (*pAllocationCallbacks->pfnAllocation)(
4793 pAllocationCallbacks->pUserData,
4796 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
4800 result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
4802 VMA_ASSERT(result != VMA_NULL &&
"CPU memory allocation failed.");
4806 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
4808 if((pAllocationCallbacks != VMA_NULL) &&
4809 (pAllocationCallbacks->pfnFree != VMA_NULL))
4811 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
4815 VMA_SYSTEM_ALIGNED_FREE(ptr);
4819 template<
typename T>
4820 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
4822 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
4825 template<
typename T>
4826 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
4828 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
4831 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
4833 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
4835 template<
typename T>
4836 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
4839 VmaFree(pAllocationCallbacks, ptr);
4842 template<
typename T>
4843 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
4847 for(
size_t i = count; i--; )
4851 VmaFree(pAllocationCallbacks, ptr);
4855 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr)
4857 if(srcStr != VMA_NULL)
4859 const size_t len = strlen(srcStr);
4860 char*
const result = vma_new_array(allocs,
char, len + 1);
4861 memcpy(result, srcStr, len + 1);
4870 static void VmaFreeString(
const VkAllocationCallbacks* allocs,
char* str)
4874 const size_t len = strlen(str);
4875 vma_delete_array(allocs, str, len + 1);
4880 template<
typename T>
4881 class VmaStlAllocator
4884 const VkAllocationCallbacks*
const m_pCallbacks;
4885 typedef T value_type;
4887 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
4888 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
4890 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
4891 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
4893 template<
typename U>
4894 bool operator==(
const VmaStlAllocator<U>& rhs)
const
4896 return m_pCallbacks == rhs.m_pCallbacks;
4898 template<
typename U>
4899 bool operator!=(
const VmaStlAllocator<U>& rhs)
const
4901 return m_pCallbacks != rhs.m_pCallbacks;
4904 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
4907 #if VMA_USE_STL_VECTOR
4909 #define VmaVector std::vector
4911 template<
typename T,
typename allocatorT>
4912 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
4914 vec.insert(vec.begin() + index, item);
4917 template<
typename T,
typename allocatorT>
4918 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
4920 vec.erase(vec.begin() + index);
4923 #else // #if VMA_USE_STL_VECTOR
4928 template<
typename T,
typename AllocatorT>
4932 typedef T value_type;
4934 VmaVector(
const AllocatorT& allocator) :
4935 m_Allocator(allocator),
4942 VmaVector(
size_t count,
const AllocatorT& allocator) :
4943 m_Allocator(allocator),
4944 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
4952 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator)
4953 : VmaVector(count, allocator) {}
4955 VmaVector(
const VmaVector<T, AllocatorT>& src) :
4956 m_Allocator(src.m_Allocator),
4957 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4958 m_Count(src.m_Count),
4959 m_Capacity(src.m_Count)
4963 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
4969 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4972 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
4976 resize(rhs.m_Count);
4979 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
4985 bool empty()
const {
return m_Count == 0; }
4986 size_t size()
const {
return m_Count; }
4987 T* data() {
return m_pArray; }
4988 const T* data()
const {
return m_pArray; }
4990 T& operator[](
size_t index)
4992 VMA_HEAVY_ASSERT(index < m_Count);
4993 return m_pArray[index];
4995 const T& operator[](
size_t index)
const
4997 VMA_HEAVY_ASSERT(index < m_Count);
4998 return m_pArray[index];
5003 VMA_HEAVY_ASSERT(m_Count > 0);
5006 const T& front()
const
5008 VMA_HEAVY_ASSERT(m_Count > 0);
5013 VMA_HEAVY_ASSERT(m_Count > 0);
5014 return m_pArray[m_Count - 1];
5016 const T& back()
const
5018 VMA_HEAVY_ASSERT(m_Count > 0);
5019 return m_pArray[m_Count - 1];
5022 void reserve(
size_t newCapacity,
bool freeMemory =
false)
5024 newCapacity = VMA_MAX(newCapacity, m_Count);
5026 if((newCapacity < m_Capacity) && !freeMemory)
5028 newCapacity = m_Capacity;
5031 if(newCapacity != m_Capacity)
5033 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
5036 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5038 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5039 m_Capacity = newCapacity;
5040 m_pArray = newArray;
5044 void resize(
size_t newCount,
bool freeMemory =
false)
5046 size_t newCapacity = m_Capacity;
5047 if(newCount > m_Capacity)
5049 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
5053 newCapacity = newCount;
5056 if(newCapacity != m_Capacity)
5058 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
5059 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
5060 if(elementsToCopy != 0)
5062 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
5064 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5065 m_Capacity = newCapacity;
5066 m_pArray = newArray;
5072 void clear(
bool freeMemory =
false)
5074 resize(0, freeMemory);
5077 void insert(
size_t index,
const T& src)
5079 VMA_HEAVY_ASSERT(index <= m_Count);
5080 const size_t oldCount = size();
5081 resize(oldCount + 1);
5082 if(index < oldCount)
5084 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
5086 m_pArray[index] = src;
5089 void remove(
size_t index)
5091 VMA_HEAVY_ASSERT(index < m_Count);
5092 const size_t oldCount = size();
5093 if(index < oldCount - 1)
5095 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
5097 resize(oldCount - 1);
5100 void push_back(
const T& src)
5102 const size_t newIndex = size();
5103 resize(newIndex + 1);
5104 m_pArray[newIndex] = src;
5109 VMA_HEAVY_ASSERT(m_Count > 0);
5113 void push_front(
const T& src)
5120 VMA_HEAVY_ASSERT(m_Count > 0);
5124 typedef T* iterator;
5126 iterator begin() {
return m_pArray; }
5127 iterator end() {
return m_pArray + m_Count; }
5130 AllocatorT m_Allocator;
5136 template<
typename T,
typename allocatorT>
5137 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
5139 vec.insert(index, item);
5142 template<
typename T,
typename allocatorT>
5143 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
5148 #endif // #if VMA_USE_STL_VECTOR
5150 template<
typename CmpLess,
typename VectorT>
5151 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
5153 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
5155 vector.data() + vector.size(),
5157 CmpLess()) - vector.data();
5158 VmaVectorInsert(vector, indexToInsert, value);
5159 return indexToInsert;
5162 template<
typename CmpLess,
typename VectorT>
5163 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
5166 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
5171 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
5173 size_t indexToRemove = it - vector.begin();
5174 VmaVectorRemove(vector, indexToRemove);
5191 template<
typename T,
typename AllocatorT,
size_t N>
5192 class VmaSmallVector
5195 typedef T value_type;
5197 VmaSmallVector(
const AllocatorT& allocator) :
5199 m_DynamicArray(allocator)
5202 VmaSmallVector(
size_t count,
const AllocatorT& allocator) :
5204 m_DynamicArray(count > N ? count : 0, allocator)
5207 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5208 VmaSmallVector(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) =
delete;
5209 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5210 VmaSmallVector<T, AllocatorT, N>& operator=(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) =
delete;
5212 bool empty()
const {
return m_Count == 0; }
5213 size_t size()
const {
return m_Count; }
5214 T* data() {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5215 const T* data()
const {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5217 T& operator[](
size_t index)
5219 VMA_HEAVY_ASSERT(index < m_Count);
5220 return data()[index];
5222 const T& operator[](
size_t index)
const
5224 VMA_HEAVY_ASSERT(index < m_Count);
5225 return data()[index];
5230 VMA_HEAVY_ASSERT(m_Count > 0);
5233 const T& front()
const
5235 VMA_HEAVY_ASSERT(m_Count > 0);
5240 VMA_HEAVY_ASSERT(m_Count > 0);
5241 return data()[m_Count - 1];
5243 const T& back()
const
5245 VMA_HEAVY_ASSERT(m_Count > 0);
5246 return data()[m_Count - 1];
5249 void resize(
size_t newCount,
bool freeMemory =
false)
5251 if(newCount > N && m_Count > N)
5254 m_DynamicArray.resize(newCount, freeMemory);
5256 else if(newCount > N && m_Count <= N)
5259 m_DynamicArray.resize(newCount, freeMemory);
5262 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count *
sizeof(T));
5265 else if(newCount <= N && m_Count > N)
5270 memcpy(m_StaticArray, m_DynamicArray.data(), newCount *
sizeof(T));
5272 m_DynamicArray.resize(0, freeMemory);
5281 void clear(
bool freeMemory =
false)
5283 m_DynamicArray.clear(freeMemory);
5287 void insert(
size_t index,
const T& src)
5289 VMA_HEAVY_ASSERT(index <= m_Count);
5290 const size_t oldCount = size();
5291 resize(oldCount + 1);
5292 T*
const dataPtr = data();
5293 if(index < oldCount)
5296 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) *
sizeof(T));
5298 dataPtr[index] = src;
5301 void remove(
size_t index)
5303 VMA_HEAVY_ASSERT(index < m_Count);
5304 const size_t oldCount = size();
5305 if(index < oldCount - 1)
5308 T*
const dataPtr = data();
5309 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) *
sizeof(T));
5311 resize(oldCount - 1);
5314 void push_back(
const T& src)
5316 const size_t newIndex = size();
5317 resize(newIndex + 1);
5318 data()[newIndex] = src;
5323 VMA_HEAVY_ASSERT(m_Count > 0);
5327 void push_front(
const T& src)
5334 VMA_HEAVY_ASSERT(m_Count > 0);
5338 typedef T* iterator;
5340 iterator begin() {
return data(); }
5341 iterator end() {
return data() + m_Count; }
5346 VmaVector<T, AllocatorT> m_DynamicArray;
5357 template<
typename T>
5358 class VmaPoolAllocator
5360 VMA_CLASS_NO_COPY(VmaPoolAllocator)
5362 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
5363 ~VmaPoolAllocator();
5364 template<
typename... Types> T* Alloc(Types... args);
5370 uint32_t NextFreeIndex;
5371 alignas(T)
char Value[
sizeof(T)];
5378 uint32_t FirstFreeIndex;
5381 const VkAllocationCallbacks* m_pAllocationCallbacks;
5382 const uint32_t m_FirstBlockCapacity;
5383 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
5385 ItemBlock& CreateNewBlock();
5388 template<
typename T>
5389 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
5390 m_pAllocationCallbacks(pAllocationCallbacks),
5391 m_FirstBlockCapacity(firstBlockCapacity),
5392 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
5394 VMA_ASSERT(m_FirstBlockCapacity > 1);
5397 template<
typename T>
5398 VmaPoolAllocator<T>::~VmaPoolAllocator()
5400 for(
size_t i = m_ItemBlocks.size(); i--; )
5401 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
5402 m_ItemBlocks.clear();
5405 template<
typename T>
5406 template<
typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
5408 for(
size_t i = m_ItemBlocks.size(); i--; )
5410 ItemBlock& block = m_ItemBlocks[i];
5412 if(block.FirstFreeIndex != UINT32_MAX)
5414 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
5415 block.FirstFreeIndex = pItem->NextFreeIndex;
5416 T* result = (T*)&pItem->Value;
5417 new(result)T(std::forward<Types>(args)...);
5423 ItemBlock& newBlock = CreateNewBlock();
5424 Item*
const pItem = &newBlock.pItems[0];
5425 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
5426 T* result = (T*)&pItem->Value;
5427 new(result)T(std::forward<Types>(args)...);
5431 template<
typename T>
5432 void VmaPoolAllocator<T>::Free(T* ptr)
5435 for(
size_t i = m_ItemBlocks.size(); i--; )
5437 ItemBlock& block = m_ItemBlocks[i];
5441 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
5444 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
5447 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
5448 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
5449 block.FirstFreeIndex = index;
5453 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
5456 template<
typename T>
5457 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
5459 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
5460 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
5462 const ItemBlock newBlock = {
5463 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
5467 m_ItemBlocks.push_back(newBlock);
5470 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
5471 newBlock.pItems[i].NextFreeIndex = i + 1;
5472 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
5473 return m_ItemBlocks.back();
5479 #if VMA_USE_STL_LIST
5481 #define VmaList std::list
5483 #else // #if VMA_USE_STL_LIST
5485 template<
typename T>
5494 template<
typename T>
5497 VMA_CLASS_NO_COPY(VmaRawList)
5499 typedef VmaListItem<T> ItemType;
5501 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
5505 size_t GetCount()
const {
return m_Count; }
5506 bool IsEmpty()
const {
return m_Count == 0; }
5508 ItemType* Front() {
return m_pFront; }
5509 const ItemType* Front()
const {
return m_pFront; }
5510 ItemType* Back() {
return m_pBack; }
5511 const ItemType* Back()
const {
return m_pBack; }
5513 ItemType* PushBack();
5514 ItemType* PushFront();
5515 ItemType* PushBack(
const T& value);
5516 ItemType* PushFront(
const T& value);
5521 ItemType* InsertBefore(ItemType* pItem);
5523 ItemType* InsertAfter(ItemType* pItem);
5525 ItemType* InsertBefore(ItemType* pItem,
const T& value);
5526 ItemType* InsertAfter(ItemType* pItem,
const T& value);
5528 void Remove(ItemType* pItem);
5531 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
5532 VmaPoolAllocator<ItemType> m_ItemAllocator;
5538 template<
typename T>
5539 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
5540 m_pAllocationCallbacks(pAllocationCallbacks),
5541 m_ItemAllocator(pAllocationCallbacks, 128),
5548 template<
typename T>
5549 VmaRawList<T>::~VmaRawList()
5555 template<
typename T>
5556 void VmaRawList<T>::Clear()
5558 if(IsEmpty() ==
false)
5560 ItemType* pItem = m_pBack;
5561 while(pItem != VMA_NULL)
5563 ItemType*
const pPrevItem = pItem->pPrev;
5564 m_ItemAllocator.Free(pItem);
5567 m_pFront = VMA_NULL;
5573 template<
typename T>
5574 VmaListItem<T>* VmaRawList<T>::PushBack()
5576 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5577 pNewItem->pNext = VMA_NULL;
5580 pNewItem->pPrev = VMA_NULL;
5581 m_pFront = pNewItem;
5587 pNewItem->pPrev = m_pBack;
5588 m_pBack->pNext = pNewItem;
5595 template<
typename T>
5596 VmaListItem<T>* VmaRawList<T>::PushFront()
5598 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5599 pNewItem->pPrev = VMA_NULL;
5602 pNewItem->pNext = VMA_NULL;
5603 m_pFront = pNewItem;
5609 pNewItem->pNext = m_pFront;
5610 m_pFront->pPrev = pNewItem;
5611 m_pFront = pNewItem;
5617 template<
typename T>
5618 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
5620 ItemType*
const pNewItem = PushBack();
5621 pNewItem->Value = value;
5625 template<
typename T>
5626 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
5628 ItemType*
const pNewItem = PushFront();
5629 pNewItem->Value = value;
5633 template<
typename T>
5634 void VmaRawList<T>::PopBack()
5636 VMA_HEAVY_ASSERT(m_Count > 0);
5637 ItemType*
const pBackItem = m_pBack;
5638 ItemType*
const pPrevItem = pBackItem->pPrev;
5639 if(pPrevItem != VMA_NULL)
5641 pPrevItem->pNext = VMA_NULL;
5643 m_pBack = pPrevItem;
5644 m_ItemAllocator.Free(pBackItem);
5648 template<
typename T>
5649 void VmaRawList<T>::PopFront()
5651 VMA_HEAVY_ASSERT(m_Count > 0);
5652 ItemType*
const pFrontItem = m_pFront;
5653 ItemType*
const pNextItem = pFrontItem->pNext;
5654 if(pNextItem != VMA_NULL)
5656 pNextItem->pPrev = VMA_NULL;
5658 m_pFront = pNextItem;
5659 m_ItemAllocator.Free(pFrontItem);
5663 template<
typename T>
5664 void VmaRawList<T>::Remove(ItemType* pItem)
5666 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
5667 VMA_HEAVY_ASSERT(m_Count > 0);
5669 if(pItem->pPrev != VMA_NULL)
5671 pItem->pPrev->pNext = pItem->pNext;
5675 VMA_HEAVY_ASSERT(m_pFront == pItem);
5676 m_pFront = pItem->pNext;
5679 if(pItem->pNext != VMA_NULL)
5681 pItem->pNext->pPrev = pItem->pPrev;
5685 VMA_HEAVY_ASSERT(m_pBack == pItem);
5686 m_pBack = pItem->pPrev;
5689 m_ItemAllocator.Free(pItem);
5693 template<
typename T>
5694 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
5696 if(pItem != VMA_NULL)
5698 ItemType*
const prevItem = pItem->pPrev;
5699 ItemType*
const newItem = m_ItemAllocator.Alloc();
5700 newItem->pPrev = prevItem;
5701 newItem->pNext = pItem;
5702 pItem->pPrev = newItem;
5703 if(prevItem != VMA_NULL)
5705 prevItem->pNext = newItem;
5709 VMA_HEAVY_ASSERT(m_pFront == pItem);
5719 template<
typename T>
5720 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
5722 if(pItem != VMA_NULL)
5724 ItemType*
const nextItem = pItem->pNext;
5725 ItemType*
const newItem = m_ItemAllocator.Alloc();
5726 newItem->pNext = nextItem;
5727 newItem->pPrev = pItem;
5728 pItem->pNext = newItem;
5729 if(nextItem != VMA_NULL)
5731 nextItem->pPrev = newItem;
5735 VMA_HEAVY_ASSERT(m_pBack == pItem);
5745 template<
typename T>
5746 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
5748 ItemType*
const newItem = InsertBefore(pItem);
5749 newItem->Value = value;
5753 template<
typename T>
5754 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
5756 ItemType*
const newItem = InsertAfter(pItem);
5757 newItem->Value = value;
5761 template<
typename T,
typename AllocatorT>
5764 VMA_CLASS_NO_COPY(VmaList)
5775 T& operator*()
const
5777 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5778 return m_pItem->Value;
5780 T* operator->()
const
5782 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5783 return &m_pItem->Value;
5786 iterator& operator++()
5788 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5789 m_pItem = m_pItem->pNext;
5792 iterator& operator--()
5794 if(m_pItem != VMA_NULL)
5796 m_pItem = m_pItem->pPrev;
5800 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5801 m_pItem = m_pList->Back();
5806 iterator operator++(
int)
5808 iterator result = *
this;
5812 iterator operator--(
int)
5814 iterator result = *
this;
5819 bool operator==(
const iterator& rhs)
const
5821 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5822 return m_pItem == rhs.m_pItem;
5824 bool operator!=(
const iterator& rhs)
const
5826 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5827 return m_pItem != rhs.m_pItem;
5831 VmaRawList<T>* m_pList;
5832 VmaListItem<T>* m_pItem;
5834 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
5840 friend class VmaList<T, AllocatorT>;
5843 class const_iterator
5852 const_iterator(
const iterator& src) :
5853 m_pList(src.m_pList),
5854 m_pItem(src.m_pItem)
5858 const T& operator*()
const
5860 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5861 return m_pItem->Value;
5863 const T* operator->()
const
5865 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5866 return &m_pItem->Value;
5869 const_iterator& operator++()
5871 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5872 m_pItem = m_pItem->pNext;
5875 const_iterator& operator--()
5877 if(m_pItem != VMA_NULL)
5879 m_pItem = m_pItem->pPrev;
5883 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5884 m_pItem = m_pList->Back();
5889 const_iterator operator++(
int)
5891 const_iterator result = *
this;
5895 const_iterator operator--(
int)
5897 const_iterator result = *
this;
5902 bool operator==(
const const_iterator& rhs)
const
5904 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5905 return m_pItem == rhs.m_pItem;
5907 bool operator!=(
const const_iterator& rhs)
const
5909 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5910 return m_pItem != rhs.m_pItem;
5914 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
5920 const VmaRawList<T>* m_pList;
5921 const VmaListItem<T>* m_pItem;
5923 friend class VmaList<T, AllocatorT>;
5926 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
5928 bool empty()
const {
return m_RawList.IsEmpty(); }
5929 size_t size()
const {
return m_RawList.GetCount(); }
5931 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
5932 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
5934 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
5935 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
5937 void clear() { m_RawList.Clear(); }
5938 void push_back(
const T& value) { m_RawList.PushBack(value); }
5939 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
5940 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
5943 VmaRawList<T> m_RawList;
5946 #endif // #if VMA_USE_STL_LIST
5954 #if VMA_USE_STL_UNORDERED_MAP
5956 #define VmaPair std::pair
5958 #define VMA_MAP_TYPE(KeyT, ValueT) \
5959 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
5961 #else // #if VMA_USE_STL_UNORDERED_MAP
5963 template<
typename T1,
typename T2>
5969 VmaPair() : first(), second() { }
5970 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
5976 template<
typename KeyT,
typename ValueT>
5980 typedef VmaPair<KeyT, ValueT> PairType;
5981 typedef PairType* iterator;
5983 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
5985 iterator begin() {
return m_Vector.begin(); }
5986 iterator end() {
return m_Vector.end(); }
5988 void insert(
const PairType& pair);
5989 iterator find(
const KeyT& key);
5990 void erase(iterator it);
5993 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
5996 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
5998 template<
typename FirstT,
typename SecondT>
5999 struct VmaPairFirstLess
6001 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const
6003 return lhs.first < rhs.first;
6005 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const
6007 return lhs.first < rhsFirst;
6011 template<
typename KeyT,
typename ValueT>
6012 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
6014 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
6016 m_Vector.data() + m_Vector.size(),
6018 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
6019 VmaVectorInsert(m_Vector, indexToInsert, pair);
6022 template<
typename KeyT,
typename ValueT>
6023 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
6025 PairType* it = VmaBinaryFindFirstNotLess(
6027 m_Vector.data() + m_Vector.size(),
6029 VmaPairFirstLess<KeyT, ValueT>());
6030 if((it != m_Vector.end()) && (it->first == key))
6036 return m_Vector.end();
6040 template<
typename KeyT,
typename ValueT>
6041 void VmaMap<KeyT, ValueT>::erase(iterator it)
6043 VmaVectorRemove(m_Vector, it - m_Vector.begin());
6046 #endif // #if VMA_USE_STL_UNORDERED_MAP
6052 class VmaDeviceMemoryBlock;
6054 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
6056 struct VmaAllocation_T
6059 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
6063 FLAG_USER_DATA_STRING = 0x01,
6067 enum ALLOCATION_TYPE
6069 ALLOCATION_TYPE_NONE,
6070 ALLOCATION_TYPE_BLOCK,
6071 ALLOCATION_TYPE_DEDICATED,
6078 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
6081 m_pUserData{VMA_NULL},
6082 m_LastUseFrameIndex{currentFrameIndex},
6083 m_MemoryTypeIndex{0},
6084 m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
6085 m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
6087 m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
6089 #if VMA_STATS_STRING_ENABLED
6090 m_CreationFrameIndex = currentFrameIndex;
6091 m_BufferImageUsage = 0;
6097 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
6100 VMA_ASSERT(m_pUserData == VMA_NULL);
6103 void InitBlockAllocation(
6104 VmaDeviceMemoryBlock* block,
6105 VkDeviceSize offset,
6106 VkDeviceSize alignment,
6108 uint32_t memoryTypeIndex,
6109 VmaSuballocationType suballocationType,
6113 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6114 VMA_ASSERT(block != VMA_NULL);
6115 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6116 m_Alignment = alignment;
6118 m_MemoryTypeIndex = memoryTypeIndex;
6119 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6120 m_SuballocationType = (uint8_t)suballocationType;
6121 m_BlockAllocation.m_Block = block;
6122 m_BlockAllocation.m_Offset = offset;
6123 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
6128 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6129 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
6130 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6131 m_MemoryTypeIndex = 0;
6132 m_BlockAllocation.m_Block = VMA_NULL;
6133 m_BlockAllocation.m_Offset = 0;
6134 m_BlockAllocation.m_CanBecomeLost =
true;
6137 void ChangeBlockAllocation(
6139 VmaDeviceMemoryBlock* block,
6140 VkDeviceSize offset);
6142 void ChangeOffset(VkDeviceSize newOffset);
6145 void InitDedicatedAllocation(
6146 uint32_t memoryTypeIndex,
6147 VkDeviceMemory hMemory,
6148 VmaSuballocationType suballocationType,
6152 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6153 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
6154 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
6157 m_MemoryTypeIndex = memoryTypeIndex;
6158 m_SuballocationType = (uint8_t)suballocationType;
6159 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6160 m_DedicatedAllocation.m_hMemory = hMemory;
6161 m_DedicatedAllocation.m_pMappedData = pMappedData;
6164 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
6165 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
6166 VkDeviceSize GetSize()
const {
return m_Size; }
6167 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
6168 void* GetUserData()
const {
return m_pUserData; }
6169 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
6170 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
6172 VmaDeviceMemoryBlock* GetBlock()
const
6174 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6175 return m_BlockAllocation.m_Block;
6177 VkDeviceSize GetOffset()
const;
6178 VkDeviceMemory GetMemory()
const;
6179 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
6180 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
6181 void* GetMappedData()
const;
6182 bool CanBecomeLost()
const;
6184 uint32_t GetLastUseFrameIndex()
const
6186 return m_LastUseFrameIndex.load();
6188 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
6190 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
6200 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6202 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
6204 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
6215 void BlockAllocMap();
6216 void BlockAllocUnmap();
6217 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
6220 #if VMA_STATS_STRING_ENABLED
6221 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
6222 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
6224 void InitBufferImageUsage(uint32_t bufferImageUsage)
6226 VMA_ASSERT(m_BufferImageUsage == 0);
6227 m_BufferImageUsage = bufferImageUsage;
6230 void PrintParameters(
class VmaJsonWriter& json)
const;
6234 VkDeviceSize m_Alignment;
6235 VkDeviceSize m_Size;
6237 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
6238 uint32_t m_MemoryTypeIndex;
6240 uint8_t m_SuballocationType;
6247 struct BlockAllocation
6249 VmaDeviceMemoryBlock* m_Block;
6250 VkDeviceSize m_Offset;
6251 bool m_CanBecomeLost;
6255 struct DedicatedAllocation
6257 VkDeviceMemory m_hMemory;
6258 void* m_pMappedData;
6264 BlockAllocation m_BlockAllocation;
6266 DedicatedAllocation m_DedicatedAllocation;
6269 #if VMA_STATS_STRING_ENABLED
6270 uint32_t m_CreationFrameIndex;
6271 uint32_t m_BufferImageUsage;
6281 struct VmaSuballocation
6283 VkDeviceSize offset;
6286 VmaSuballocationType type;
6290 struct VmaSuballocationOffsetLess
6292 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6294 return lhs.offset < rhs.offset;
6297 struct VmaSuballocationOffsetGreater
6299 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6301 return lhs.offset > rhs.offset;
6305 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
6308 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
6310 enum class VmaAllocationRequestType
6332 struct VmaAllocationRequest
6334 VkDeviceSize offset;
6335 VkDeviceSize sumFreeSize;
6336 VkDeviceSize sumItemSize;
6337 VmaSuballocationList::iterator item;
6338 size_t itemsToMakeLostCount;
6340 VmaAllocationRequestType type;
6342 VkDeviceSize CalcCost()
const
6344 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
6352 class VmaBlockMetadata
6356 virtual ~VmaBlockMetadata() { }
6357 virtual void Init(VkDeviceSize size) { m_Size = size; }
6360 virtual bool Validate()
const = 0;
6361 VkDeviceSize GetSize()
const {
return m_Size; }
6362 virtual size_t GetAllocationCount()
const = 0;
6363 virtual VkDeviceSize GetSumFreeSize()
const = 0;
6364 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
6366 virtual bool IsEmpty()
const = 0;
6368 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
6370 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
6372 #if VMA_STATS_STRING_ENABLED
6373 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
6379 virtual bool CreateAllocationRequest(
6380 uint32_t currentFrameIndex,
6381 uint32_t frameInUseCount,
6382 VkDeviceSize bufferImageGranularity,
6383 VkDeviceSize allocSize,
6384 VkDeviceSize allocAlignment,
6386 VmaSuballocationType allocType,
6387 bool canMakeOtherLost,
6390 VmaAllocationRequest* pAllocationRequest) = 0;
6392 virtual bool MakeRequestedAllocationsLost(
6393 uint32_t currentFrameIndex,
6394 uint32_t frameInUseCount,
6395 VmaAllocationRequest* pAllocationRequest) = 0;
6397 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
6399 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
6403 const VmaAllocationRequest& request,
6404 VmaSuballocationType type,
6405 VkDeviceSize allocSize,
6410 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
6413 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
6415 #if VMA_STATS_STRING_ENABLED
6416 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6417 VkDeviceSize unusedBytes,
6418 size_t allocationCount,
6419 size_t unusedRangeCount)
const;
6420 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6421 VkDeviceSize offset,
6423 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6424 VkDeviceSize offset,
6425 VkDeviceSize size)
const;
6426 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
6430 VkDeviceSize m_Size;
6431 const VkAllocationCallbacks* m_pAllocationCallbacks;
6434 #define VMA_VALIDATE(cond) do { if(!(cond)) { \
6435 VMA_ASSERT(0 && "Validation failed: " #cond); \
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    // For defragmentation:
    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
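// How the generic metadata finds a best-fit hole (a sketch based on the structures above):
// m_FreeSuballocationsBySize stays sorted by size ascending, so a best-fit lookup is a
// binary search for the first free suballocation of size >= allocSize + 2 * VMA_DEBUG_MARGIN,
// followed by a forward scan until CheckAllocation() accepts a candidate (alignment or
// bufferImageGranularity can reject the first hits). With hypothetical free sizes
// {64, 256, 1024, 4096} and allocSize 200 (margin 0), the search lands on 256 first.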
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    /*
    There are two suballocation vectors, used in ping-pong way.
    The one with index m_1stVectorIndex is called 1st.
    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
    2nd can be non-empty only when 1st is not empty.
    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
    */
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        /*
        Suballocations in 2nd vector are created later than the ones in 1st, but they
        all have smaller offset.
        */
        SECOND_VECTOR_RING_BUFFER,
        /*
        Suballocations in 2nd vector are upper side of double stack.
        They all have offsets higher than those in 1st vector.
        */
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
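// Layout sketch for the linear algorithm (illustrative, following the modes above).
// In ring-buffer mode, allocations append at the end of the 1st vector while frees happen
// mostly at its beginning; when the end of the block is reached, the 2nd vector opens as a
// ring buffer starting back at offset 0:
//
//     |2222----11111111|   <- 2nd vector wraps around while 1st still occupies the tail
//
// In double-stack mode the 2nd vector instead holds upper-address allocations growing
// downward from the end of the block, so the free space is the gap in the middle:
//
//     |11111------22222|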
/*
- GetSize() is the original size of allocated memory block.
- m_UsableSize is this size aligned down to a power of two.
  All allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them.
  It is reported as separate, unused range, not available for allocations.

Node at level 0 has size = m_UsableSize.
Each next level contains nodes with size 2 times smaller than current level.
m_LevelCount is the maximum number of levels to use in the current object.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    struct
    {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
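    // Worked example of the level math above (hypothetical sizes): with m_UsableSize =
    // 256 MiB, LevelToNodeSize(0) = 256 MiB, LevelToNodeSize(1) = 128 MiB, halving per
    // level down to MIN_NODE_SIZE. AllocSizeToLevel() picks the deepest level whose node
    // size still fits the request, so a 3 MiB request maps to the 4 MiB level; the ~1 MiB
    // of padding is the internal fragmentation inherent to buddy allocators, traded for
    // O(log n) alloc and free.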
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;

    // Adds node to the front of FreeList at given level.
    // node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    // node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
/*
Represents a single block of device memory (`VkDeviceMemory`) with all the
data about its regions (aka suballocations, #VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        const VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        const VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it is not used by multiple threads simultaneously,
    e.g. vkMapMemory, vkBindBufferMemory. Also protects m_MapCount, m_pMappedData.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaDeviceMemoryBlock* pSrcBlock;
    VmaDeviceMemoryBlock* pDstBlock;
};

class VmaDefragmentationAlgorithm;
/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
Vulkan memory type.

Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void Free(const VmaAllocation hAllocation);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    // Saves results in pCtx->res.
    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        uint32_t flags,
        VmaDefragmentationStats* pStats);

    uint32_t ProcessDefragmentations(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationPassMoveInfo* pMoves, uint32_t maxMoves);

    void CommitDefragmentations(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats);

    // To be used only while the m_Mutex is locked. Used during defragmentation.
    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    /* There can be at most one allocation that is completely empty (except when
    minBlockCount > 0) - a hysteresis to avoid pessimistic case of alternating
    creation and destruction of a VkDeviceMemory. */
    bool m_HasEmptyBlock;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    void UpdateHasEmptyBlock();
};
struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo,
        VkDeviceSize preferredBlockSize);
    ~VmaPool_T();

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
#endif

private:
    uint32_t m_Id;
    char* m_Name;
};
/*
Abstract base class for defragmentation of a single VmaBlockVector: computes a set of
moves and updates metadata; the owning VmaBlockVector applies the moves to actual memory.
*/
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        bool freeOldAllocations);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    // Leave this structure for remaining empty space.
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    // This structure becomes invalid.
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }

                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };
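    // Worked example of Fetch()'s alignment arithmetic (hypothetical values): for a free
    // space at offset = 100, size = 200 and a request with alignment = 64, size = 120:
    //   dstOffset = VmaAlignUp(100, 64) = 128
    //   fits, because 128 + 120 = 248 <= 100 + 200 = 300
    //   freeSpaceAfter = 300 - 248 = 52
    //   alignmentPlusSize = (128 - 100) + 120 = 148
    // If 52 still meets VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER, the entry survives
    // shrunk to offset 248, size 52; otherwise it is invalidated.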
    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    uint32_t flags;
    VkBuffer hBuffer;
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VkResult res;
    bool mutexLocked;
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
    uint32_t defragmentationMovesProcessed;
    uint32_t defragmentationMovesCommitted;
    bool hasDefragmentationPlan;

    VmaBlockVectorDefragmentationContext(
        VmaAllocator hAllocator,
        VmaPool hCustomPool, // Optional.
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

private:
    const VmaAllocator m_hAllocator;
    // Null if not from custom pool.
    const VmaPool m_hCustomPool;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        VmaAllocator hAllocator,
        uint32_t currFrameIndex,
        uint32_t flags,
        VmaDefragmentationStats* pStats);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, const VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        const VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags);

    VkResult DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo);
    VkResult DefragmentPassEnd();

private:
    const VmaAllocator m_hAllocator;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    VmaDefragmentationStats* const m_pStats;

    VkDeviceSize m_MaxCpuBytesToMove;
    uint32_t m_MaxCpuAllocationsToMove;
    VkDeviceSize m_MaxGpuBytesToMove;
    uint32_t m_MaxGpuAllocationsToMove;

    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    VmaRecorder();
    VkResult Init(const VmaRecordSettings& settings, bool useMutex);
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled,
        bool deviceCoherentMemoryExtensionEnabled);
    ~VmaRecorder();

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationContext ctx);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        VmaDefragmentationContext ctx);
    void RecordSetPoolName(uint32_t frameIndex,
        VmaPool pool,
        const char* name);

private:
    struct CallParams
    {
        uint32_t threadId;
        double time;
    };

    class UserDataString
    {
    public:
        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
        const char* GetString() const { return m_Str; }

    private:
        char m_PtrStr[17];
        const char* m_Str;
    };

    bool m_UseMutex;
    VmaRecordFlags m_Flags;
    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
    void Flush();
};

#endif // #if VMA_RECORDING_ENABLED
/*
Thread-safe wrapper over VmaPoolAllocator free list, for allocation of VmaAllocation_T objects.
*/
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    template<typename... Types> VmaAllocation Allocate(Types... args);
    void Free(VmaAllocation hAlloc);

private:
    VMA_MUTEX m_Mutex;
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
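// A minimal sketch of how this bookkeeping is meant to be driven (illustrative call
// sites; the real ones live in the allocation paths elsewhere in this file):
//
//     // after a successful vkAllocateMemory of `size` bytes on heap `heapIndex`:
//     m_Budget.m_BlockBytes[heapIndex] += size;
//     m_Budget.AddAllocation(heapIndex, allocSize);   // per-allocation accounting
//
//     // on free, in reverse:
//     m_Budget.RemoveAllocation(heapIndex, allocSize);
//     m_Budget.m_BlockBytes[heapIndex] -= size;
//
// m_OperationsSinceBudgetFetch lets the allocator refresh VK_EXT_memory_budget data only
// every so many operations instead of on every call.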
// Main allocator object.
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    bool m_UseMutex;
    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0).
    bool m_UseKhrBindMemory2; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0).
    bool m_UseExtMemoryBudget;
    bool m_UseAmdDeviceCoherentMemory;
    bool m_UseKhrBufferDeviceAddress;
    VkDevice m_hDevice;
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap.
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo);
    ~VmaAllocator_T();

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }
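    // Worked example for the min-alignment rule above: on a device with
    // nonCoherentAtomSize = 64, a mapped allocation in a HOST_VISIBLE but not
    // HOST_COHERENT memory type must be aligned (and its flush ranges sized) in
    // multiples of 64, so this returns max(VMA_DEBUG_ALIGNMENT, 64) for such types
    // and plain VMA_DEBUG_ALIGNMENT for coherent ones.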
    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

    uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationStats* pStats,
        VmaDefragmentationContext* pContext);
    VkResult DefragmentationEnd(
        VmaDefragmentationContext context);

    VkResult DefragmentationPassBegin(
        VmaDefragmentationPassInfo* pInfo,
        VmaDefragmentationContext context);
    VkResult DefragmentationPassEnd(
        VmaDefragmentationContext context);

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkBuffer buffer,
        const void* pNext);
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkImage image,
        const void* pNext);

    VkResult BindBufferMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

    VkResult FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);
    VkResult FlushOrInvalidateAllocations(
        uint32_t allocationCount,
        VmaAllocation* allocations,
        const VkDeviceSize* offsets, const VkDeviceSize* sizes,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    /*
    Returns bit mask of memory types that can support defragmentation on GPU as
    they support creation of required buffer for copy operations.
    */
    uint32_t GetGpuDefragmentationMemoryTypeBits();

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

    // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types.
    uint32_t m_GlobalMemoryTypeBits;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif
    void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);

#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    void ImportVulkanFunctions_Static();
#endif

    void ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions);

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    void ImportVulkanFunctions_Dynamic();
#endif

    void ValidateVulkanFunctions();

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool withinBudget,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

    uint32_t CalculateGlobalMemoryTypeBits() const;

    bool GetFlushOrInvalidateRange(
        VmaAllocation allocation,
        VkDeviceSize offset, VkDeviceSize size,
        VkMappedMemoryRange& outRange) const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char* p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char* p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
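// A minimal usage sketch for VmaStringBuilder (illustrative; `alloc` stands for any
// valid VmaAllocator handle):
//
//     VmaStringBuilder sb(alloc);
//     sb.Add("Count: ");
//     sb.AddNumber(42u);     // appends "42" via the digit loop above
//     sb.AddNewLine();
//     // sb.GetData() now points at "Count: 42\n". The buffer is not null-terminated;
//     // always pair it with GetLength().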
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b':
            m_SB.Add("\\b");
            break;
        case '\f':
            m_SB.Add("\\f");
            break;
        case '\n':
            m_SB.Add("\\n");
            break;
        case '\r':
            m_SB.Add("\\r");
            break;
        case '\t':
            m_SB.Add("\\t");
            break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
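// A minimal usage sketch for VmaJsonWriter (illustrative; `allocCallbacks` and `sb` are
// assumed to exist, and allocCallbacks may be null). The writer validates JSON shape with
// asserts: inside an object, keys must be strings, and every Begin* must be matched
// before destruction.
//
//     VmaJsonWriter json(allocCallbacks, sb);
//     json.BeginObject();
//     json.WriteString("Answer");   // key
//     json.WriteNumber(42u);        // value; BeginValue() emits the ": " separator
//     json.EndObject();             // sb now holds {\n  "Answer": 42\n}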
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
        {
            ++mapRefCount;
        }
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
        break;
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you are doing :)
    */
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting the last-use frame index atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
                return true;
            }
        }
    }
}
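// Frame arithmetic behind MakeLost(), with hypothetical numbers: if an allocation was
// last used at frame 100 and frameInUseCount is 2, it can only become lost once
// currentFrameIndex reaches 103 (at frame 102, 100 + 2 >= 102 still fails the check
// above). The compare-exchange loop makes this safe against a concurrent touch bumping
// the last-use frame index between the read and the write.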
#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}

void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};

VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}

bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations as calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations as calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in
    // m_FreeSuballocationsBySize, calculated from traversing their list.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Margin required between allocations - every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // Margin required between allocations - previous allocation must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
    // match expected one.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}

VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}

void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;

    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request: Early return.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    // New algorithm, efficiently searching freeSuballocationsBySize.
    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    it,
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search starting from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm: check all suballocations for the lowest-cost candidate.
        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
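// Strategy walk-through for the search above, with hypothetical numbers: given free
// suballocations of sizes {64, 256, 1024} (sorted ascending in m_FreeSuballocationsBySize)
// and allocSize = 200 with VMA_DEBUG_MARGIN = 0:
//  - BEST_FIT binary-searches for the first size >= 200 and tries 256, then 1024.
//  - WORST_FIT / FIRST_FIT scan from the largest, trying 1024 first.
//  - MIN_OFFSET (internal, used by defragmentation) walks the suballocation list in
//    address order and takes the first free range that fits.
// The canMakeOtherLost fallback is a brute-force pass that prices each candidate with
// VmaAllocationRequest::CalcCost() and keeps the cheapest.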
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}
uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
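/*
CheckCorruption below relies on the VMA_DEBUG_MARGIN mechanism: when the
margin is non-zero, every allocation is surrounded by free space into which a
magic value is written, so the layout of one allocation inside a block is

    [ ... | margin (magic) | allocation | margin (magic) | ... ]
                            ^ offset                      ^ offset + size

If either magic value was overwritten, some code wrote out of bounds of the
neighboring allocation. A sketch of enabling it at compile time (these are
the library's configuration macros, set before including the implementation):

    #define VMA_DEBUG_MARGIN 16
    #define VMA_DEBUG_DETECT_CORRUPTION 1
    #define VMA_IMPLEMENTATION
    #include "vk_mem_alloc.h"
*/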
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
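/*
Alloc converts the free suballocation found by CreateAllocationRequest into a
used one. Because the chosen offset may have been aligned past the start of
the free range, and the range may be larger than allocSize, up to two new
FREE suballocations are split off and re-registered:

    before:  [------------------ free ------------------]
    after:   [ paddingBegin | used (allocSize) | paddingEnd ]
*/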
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert a new free suballocation after current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert a new free suballocation before current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
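/*
CheckAllocation answers: can allocSize bytes at allocAlignment start inside
the suballocation pointed to by suballocItem? It has two modes. With
canMakeOtherLost == false it only inspects a single FREE suballocation. With
canMakeOtherLost == true it may span a run of consecutive suballocations,
counting how many "lost-able" allocations (CanBecomeLost() and not used for
more than frameInUseCount frames) would have to be sacrificed; the caller
ranks candidates by that cost via VmaAllocationRequest::CalcCost().
*/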
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that we have final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. *pOffset is already filled.
    return true;
}
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}

VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
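/*
m_FreeSuballocationsBySize holds iterators to FREE suballocations of size >=
VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER, kept sorted by size so best-fit
can binary-search it. The two functions below maintain that invariant and
must be called whenever a free suballocation is created, resized, or
destroyed; note that lookup by size may hit several equal-sized entries, so
Unregister scans forward from the binary-search result until it finds the
exact iterator.
*/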
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }
}

void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty())
    {
        return false;
    }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it)
    {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
        {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
            {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}
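/*
VmaBlockMetadata_Linear implements the VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT
algorithm. It keeps two vectors of suballocations sorted by offset and flips
between three modes:

- SECOND_VECTOR_EMPTY: plain stack, everything lives in the 1st vector.
- SECOND_VECTOR_RING_BUFFER: new allocations wrap around to low offsets into
  the 2nd vector while the 1st vector is consumed from its beginning.
- SECOND_VECTOR_DOUBLE_STACK: the 2nd vector grows down from the end of the
  block (upper-address allocations).

A sketch of creating such a pool (hedged: memTypeIndex and the sizes are
placeholders for values a real application would compute):

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex; // e.g. via vmaFindMemoryTypeIndex()
    poolCreateInfo.blockSize = 64ull * 1024 * 1024;
    poolCreateInfo.maxBlockCount = 1;
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    VmaPool pool;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
*/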
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Linear

VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_SumFreeSize(0),
    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_1stVectorIndex(0),
    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
    m_1stNullItemsBeginCount(0),
    m_1stNullItemsMiddleCount(0),
    m_2ndNullItemsCount(0)
{
}

VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
{
}

void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}
bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty())
    {
        // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty())
    {
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i)
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree)
        {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else
        {
            ++nullItem1stCount;
        }

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; )
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}

VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();

    /*
    We don't consider gaps inside allocation vectors with freed allocations because
    they are not suitable for reuse in linear allocator. We consider only space that
    is available for new allocations.
    */
    if(IsEmpty())
    {
        return size;
    }

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode)
    {
    case SECOND_VECTOR_EMPTY:
        // Available space is after end of 1st, as well as before beginning of 1st
        // (which would make it a ring buffer).
        {
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }

    case SECOND_VECTOR_RING_BUFFER:
        // Available space is only between end of 2nd and beginning of 1st.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }

    case SECOND_VECTOR_DOUBLE_STACK:
        // Available space is only between end of 1st and top of 2nd.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }

    default:
        VMA_ASSERT(0);
        return 0;
    }
}
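/*
The statistics and printing functions below all perform the same linear walk
over used space: first the 2nd vector when it acts as a ring buffer (low
offsets), then the 1st vector, then the 2nd vector again when it acts as a
double stack (high offsets), tracking lastOffset to derive the unused ranges
in between. Only the per-range action differs (accumulate stats, count, or
emit JSON).
*/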
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    outInfo.blockCount = 1;
    outInfo.allocationCount = (uint32_t)GetAllocationCount();
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.unusedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                // Found no next allocation: process remaining free space to freeSpace2ndTo1stEnd.
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }

            outInfo.usedBytes += suballoc.size;
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }
}
void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                ++inoutStats.allocationCount;

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            ++inoutStats.allocationCount;

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                ++inoutStats.allocationCount;

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: count allocations and unused ranges.

    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;

    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }

                ++alloc2ndCount;
                usedBytes += suballoc.size;

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    ++unusedRangeCount;
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                ++unusedRangeCount;
            }

            ++alloc1stCount;
            usedBytes += suballoc.size;

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < size)
            {
                ++unusedRangeCount;
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }

                ++alloc2ndCount;
                usedBytes += suballoc.size;

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    ++unusedRangeCount;
                }
                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);

    // SECOND PASS: emit the actual entries.
    lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }

            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED
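/*
Allocation requests are dispatched on the upperAddress flag
(VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT): upper-address allocations push
onto the 2nd vector growing down from the end of the block (double stack),
all others append to the end of the 1st vector or wrap into a ring buffer.
The two cases are mutually exclusive per block: once used as a ring buffer,
a block cannot also be used as a double stack, and vice versa.
*/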
bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());
    return upperAddress ?
        CreateAllocationRequest_UpperAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
        CreateAllocationRequest_LowerAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}
bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
        return false;
    }

    // Try to allocate before 2nd.back(), or end of block if 2nd.empty().
    if(allocSize > size)
    {
        return false;
    }
    VkDeviceSize resultBaseOffset = size - allocSize;
    if(!suballocations2nd.empty())
    {
        const VmaSuballocation& lastSuballoc = suballocations2nd.back();
        resultBaseOffset = lastSuballoc.offset - allocSize;
        if(allocSize > lastSuballoc.offset)
        {
            return false;
        }
    }

    // Start from offset equal to end of free space.
    VkDeviceSize resultOffset = resultBaseOffset;

    // Apply VMA_DEBUG_MARGIN at the end.
    if(VMA_DEBUG_MARGIN > 0)
    {
        if(resultOffset < VMA_DEBUG_MARGIN)
        {
            return false;
        }
        resultOffset -= VMA_DEBUG_MARGIN;
    }

    // Apply alignment.
    resultOffset = VmaAlignDown(resultOffset, allocAlignment);

    // Check next suballocations from 2nd for BufferImageGranularity conflicts.
    // Increase alignment if necessary.
    if(bufferImageGranularity > 1 && !suballocations2nd.empty())
    {
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
        {
            const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
            if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
            {
                if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
                {
                    bufferImageGranularityConflict = true;
                    break;
                }
            }
            else
            {
                // Already on previous page.
                break;
            }
        }
        if(bufferImageGranularityConflict)
        {
            resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
        }
    }

    // There is enough free space.
    const VkDeviceSize endOf1st = !suballocations1st.empty() ?
        suballocations1st.back().offset + suballocations1st.back().size :
        0;
    if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
    {
        // Check previous suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
            }
        }

        // All tests passed: Success.
        pAllocationRequest->offset = resultOffset;
        pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
        pAllocationRequest->sumItemSize = 0;
        // pAllocationRequest->item, customData unused.
        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
        return true;
    }

    return false;
}
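/*
The lower-address path below has two cases: if the 2nd vector is empty or
used as a double stack, the new allocation goes after the end of the 1st
vector; if the block already wrapped (ring buffer), it goes after the end of
the 2nd vector, below the beginning of the 1st, possibly making old
1st-vector allocations lost to create room.
*/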
bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        // Try to allocate at the end of 1st vector.

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations1st.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
            {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
                {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on previous page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            // pAllocationRequest->item, customData unused.
            pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }

    // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
    // beginning of 1st vector as the end of free space.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost)
        {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
            {
                // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    // No problem - a free item can simply be skipped.
                }
                else
                {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else
                    {
                        return false;
                    }
                }
                ++index1st;
            }

            // Check next suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, we must mark more allocations lost or fail.
            if(bufferImageGranularity > 1)
            {
                while(index1st < suballocations1st.size())
                {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
                    {
                        if(suballoc.hAllocation != VK_NULL_HANDLE)
                        {
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                            {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else
                            {
                                return false;
                            }
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                    ++index1st;
                }
            }

            // Special case: There is not enough room at the end for this allocation, even after making all from the 1st lost.
            if(index1st == suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
            {
                VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
                return false;
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1)
            {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++)
                {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
            // pAllocationRequest->item, customData unused.
            return true;
        }
    }

    return false;
}
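/*
Lost-allocation support in the linear algorithm only applies to the
ring-buffer case above. A sketch of the client-side protocol (hedged:
`allocator`, `alloc`, and `frameIndex` are placeholders; a real application
would also recreate the resource once an allocation is reported lost):

    // Each frame:
    vmaSetCurrentFrameIndex(allocator, frameIndex);

    // Before using an allocation created with
    // VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT:
    if(!vmaTouchAllocation(allocator, alloc))
    {
        // Allocation became lost - its memory was reused. Recreate it.
    }
*/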
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0)
    {
        return true;
    }

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    // We always start from 1st.
    SuballocationVectorType* suballocations = &AccessSuballocations1st();
    size_t index = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
    {
        if(index == suballocations->size())
        {
            index = 0;
            // If we get to the end of 1st, we wrap around to beginning of 2nd.
            if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                suballocations = &AccessSuballocations2nd();
            }
            // else: m_2ndVectorMode == SECOND_VECTOR_EMPTY:
            // suballocations continues pointing at AccessSuballocations1st().
            VMA_ASSERT(!suballocations->empty());
        }
        VmaSuballocation& suballoc = (*suballocations)[index];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                if(suballocations == &AccessSuballocations1st())
                {
                    ++m_1stNullItemsMiddleCount;
                }
                else
                {
                    ++m_2ndNullItemsCount;
                }
                ++madeLostCount;
            }
            else
            {
                return false;
            }
        }
        ++index;
    }

    CleanupAfterFree();

    return true;
}
uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount)
    {
        CleanupAfterFree();
    }

    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type)
    {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();

            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());

            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector.
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode)
            {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}

void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty())
    {
        // First allocation: Mark it as next empty at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset)
        {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
    {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaBinaryFindSorted(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc,
            VmaSuballocationOffsetLess());
        if(it != suballocations1st.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
    {
        // Item from the middle of 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
        if(it != suballocations2nd.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty())
    {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else
    {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
        {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            VmaVectorRemove(suballocations2nd, 0);
        }

        if(ShouldCompact1st())
        {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
            {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
                {
                    ++srcIndex;
                }
                if(dstIndex != srcIndex)
                {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty())
        {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
        {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
                {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}
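/*
VmaBlockMetadata_Buddy implements the VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT
algorithm: a binary tree over a power-of-two usable size, where every node is
FREE, SPLIT, or ALLOCATION. Allocation sizes are rounded up to the node size
of some level, so internal fragmentation is possible, but allocation and free
are O(log N) and freed buddies coalesce by simply merging sibling nodes back
into their parent. Any remainder of the block beyond VmaPrevPow2(size) is
permanently unusable and reported as such in statistics.
*/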
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Buddy

VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}

VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}

void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
    {
        ++m_LevelCount;
    }

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}
bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next)
        {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL)
            {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else
            {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists at levels beyond m_LevelCount are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}
VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        if(m_FreeList[level].front != VMA_NULL)
        {
            return LevelToNodeSize(level);
        }
    }
    return 0;
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    outInfo.blockCount = 1;

    outInfo.allocationCount = outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = outInfo.unusedBytes = 0;

    outInfo.allocationSizeMax = outInfo.unusedRangeSizeMax = 0;
    outInfo.allocationSizeMin = outInfo.unusedRangeSizeMin = UINT64_MAX;

    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0)
    {
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
    }
}

void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0)
    {
        ++inoutStats.unusedRangeCount;
        // Not updating unusedRangeSizeMax with unusableSize because this space is not available for allocations.
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity: whenever the allocation
    // might be an OPTIMAL image, inflate alignment and size to the granularity.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
    {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}
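// Note on the search above: the loop starts at targetLevel (the smallest node
// size that still fits the request) and walks up toward level 0 (larger nodes),
// so the tightest-fitting free node wins. The level found is smuggled through
// pAllocationRequest->customData; splitting down to targetLevel is deferred to
// Alloc().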
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    /*
    Lost allocations are not supported in the buddy allocator algorithm.
    Support might be added in the future.
    */
    return pAllocationRequest->itemsToMakeLostCount == 0;
}

uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    /*
    Lost allocations are not supported in the buddy allocator algorithm.
    Support might be added in the future.
    */
    return 0;
}
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down, splitting free nodes.
    while(currLevel < targetLevel)
    {
        // currNode is already the first free node at currLevel.
        // Remove it from the list of free nodes at this currLevel.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
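// Worked example (illustrative only): with 256 MiB usable size, a 40 MiB request
// gives targetLevel = 2 (64 MiB nodes). If only the level-0 root is free, the
// loop above splits twice - 256 -> 128 + 128 -> 64 + 64 - marking the intermediate
// nodes TYPE_SPLIT and the final 64 MiB node TYPE_ALLOCATION. Pushing the left
// child to the free-list front last ("Order is important!") makes it the node
// picked up by the next iteration, so splitting always proceeds leftward.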
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }

    vma_delete(GetAllocationCallbacks(), node);
}
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev, next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find node and level.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT)
    {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize)
        {
            node = node->split.leftChild;
        }
        else
        {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free nodes (buddies) if possible.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
    {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}
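// The merge loop above relies on the buddy invariant: a node's buddy covers the
// other half of their parent's range, so whenever both halves are TYPE_FREE the
// pair collapses into the parent without scanning any lists. Each merge is O(1)
// and at most m_LevelCount merges happen per free.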
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += levelNodeSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            ++outInfo.allocationCount;
            outInfo.usedBytes += allocSize;
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);

            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    // List is empty.
    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else
    {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}
void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}
void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // Hitting this assert means some allocations were not freed - a memory leak
    // of VmaAllocation objects on the application side.
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}
bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
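// Corruption-detection layout (illustrative):
//   [ margin / MAGIC ][ allocation payload ][ MAGIC / margin ]
// The magic words are stamped into the VMA_DEBUG_MARGIN regions when the
// allocation is created and re-checked here on free, so an out-of-bounds write
// by the application trips one of the two asserts above with a clear message.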
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}
VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
static void InitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}
VmaPool_T::~VmaPool_T()
{
}
void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}
#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}
VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}
bool VmaBlockVector::IsEmpty()
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    return m_Blocks.empty();
}
bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_Algorithm == 0 || m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // If linear algorithm is used, canMakeOtherLost is available only when used as ring buffer,
    // which in turn is available only when maxBlockCount = 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                    createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
            for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                    createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Backward order in m_Blocks - prefer blocks with largest amount of free space.
            for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                    createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }

        // 2. Try to create new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                    createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }

    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size, alignment, isUpperAddress, suballocType,
                        canMakeOtherLost, strategy, &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;
                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size, alignment, isUpperAddress, suballocType,
                        canMakeOtherLost, strategy, &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;
                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // Allocate from this block.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    UpdateHasEmptyBlock();
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment, size, m_MemoryTypeIndex, suballocType, mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations must have been touched while we are here. Next try.
            }
            else
            {
                // Could not find place in any of the blocks - break outer loop.
                break;
            }
        }
        // Maximum number of tries exceeded - a very unlikely situation.
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
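// AllocatePage in outline: (1) try existing blocks, in an order that depends on
// the chosen strategy, (2) create a new block, (3) as a last resort try to make
// other allocations lost. New blocks start small (illustrative numbers): with no
// existing blocks and m_PreferredBlockSize = 256 MiB, the first block is tried
// at 1/8 = 32 MiB (NEW_BLOCK_SIZE_SHIFT_MAX = 3 halvings); conversely, if
// vkAllocateMemory fails, the size is halved again - never below the requested
// size - before giving up. This keeps lightly used heaps lean while still
// reaching the preferred block size under sustained load.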
void VmaBlockVector::Free(VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already has empty block. We don't want to have two, so delete this one.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // else: We now have an empty block - leave it.
        }
        // pBlock didn't become empty, but we have another empty block - find and free that one.
        // (This is optional, heuristics.)
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
            }
        }

        UpdateHasEmptyBlock();
        IncrementallySortBlocks();
    }

    // Destruction of a free block. Deferred until this point, outside of mutex
    // lock, for performance reason.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
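// Heuristic above: the vector keeps at most one empty block alive as a cache for
// the next allocation; a second empty block (or any empty block while over
// budget) is destroyed. Destruction is deferred to outside the mutex so other
// threads are not stalled on vkFreeMemory.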
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}
void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}
void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
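// "Bubble sort only until first swap" performs at most one swap per call, so
// each call is cheap and the vector converges toward ascending free size over
// successive allocations and frees - good enough for the best-fit search that
// scans m_Blocks front to back, without paying for a full sort on every free.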
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex,
        m_FrameInUseCount,
        m_BufferImageGranularity,
        size, alignment, isUpperAddress, suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // Allocate from pBlock. Because CanMakeOtherLost is false, this doesn't make other allocations lost.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        UpdateHasEmptyBlock();
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment, size, m_MemoryTypeIndex, suballocType, mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;

#if VMA_BUFFER_DEVICE_ADDRESS
    // Every standalone block can potentially contain a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT - always enable the feature.
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_hAllocator->m_UseKhrBufferDeviceAddress)
    {
        allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
        VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created. Create new block for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // The place where actual data copy happens.
            memcpy(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
    // Regardless of pDefragCtx->res.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
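// Non-coherent memory note (illustrative numbers): vkInvalidate/
// vkFlushMappedMemoryRanges require offset and size aligned to
// nonCoherentAtomSize. With a 64-byte atom, a move at srcOffset = 100 of
// size = 200 invalidates offset = VmaAlignDown(100, 64) = 64 and
// size = VmaAlignUp(200 + 36, 64) = 256, clamped above so the range never runs
// past the end of the block.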
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind buffer for whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Save buffers to defrag context for later destruction.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}

void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    // Scan all blocks with last one first, delete empty ones.
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                break;
            }
        }
    }
    UpdateHasEmptyBlock();
}
void VmaBlockVector::UpdateHasEmptyBlock()
{
    m_HasEmptyBlock = false;
    for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = true;
            break;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: Heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
            {
                if(!m_Mutex.TryLockWrite())
                {
                    pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
                    return;
                }
            }
            else
            {
                m_Mutex.LockWrite();
                pCtx->mutexLocked = true;
            }
        }

        pCtx->Begin(overlappingMoveSupported, flags);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
        {
            if(m_hAllocator->m_UseMutex)
                m_Mutex.UnlockWrite();

            if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
                pCtx->res = VK_NOT_READY;

            return;
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
            }
        }
    }
}
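// Choice of path above: the GPU path records vkCmdCopyBuffer commands into the
// caller's command buffer and is preferred for DEVICE_LOCAL memory (or on
// integrated GPUs), while the CPU path memcpy()s through mapped pointers and
// therefore requires HOST_VISIBLE memory. Only the CPU path can handle
// overlapping src/dst ranges, hence overlappingMoveSupported = !defragmentOnGpu.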
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    uint32_t flags,
    VmaDefragmentationStats* pStats)
{
    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL && m_hAllocator->m_UseMutex)
    {
        VMA_ASSERT(pCtx->mutexLocked == false);

        // Incremental defragmentation doesn't hold the lock, so when we enter here
        // we don't actually have any lock protecting us. Since we mutate state here,
        // we have to take the lock out now.
        m_Mutex.LockWrite();
        pCtx->mutexLocked = true;
    }

    // If the mutex isn't locked we didn't do any work and there is nothing to delete.
    if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
    {
        // Destroy buffers.
        for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
        {
            VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
            if(blockCtx.hBuffer)
            {
                (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            FreeEmptyBlocks(pStats);
        }
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
uint32_t VmaBlockVector::ProcessDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    const uint32_t moveCount = VMA_MIN(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);

    for(uint32_t i = 0; i < moveCount; ++ i)
    {
        VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];

        pMove->allocation = move.hAllocation;
        pMove->memory = move.pDstBlock->GetDeviceMemory();
        pMove->offset = move.dstOffset;

        ++ pMove;
    }

    pCtx->defragmentationMovesProcessed += moveCount;

    return moveCount;
}
void VmaBlockVector::CommitDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationStats* pStats)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
    {
        const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];

        move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
        move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
    }

    pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
    FreeEmptyBlocks(pStats);
}
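// Incremental defragmentation protocol: ProcessDefragmentations() hands out up
// to maxMoves pending moves per pass (the application performs the copies
// itself, e.g. on a transfer queue), and CommitDefragmentations() afterwards
// frees each source range and repoints the VmaAllocation at its destination
// block, so metadata only changes once the data is known to have moved.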
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}
bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}
VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now as we are inside VmaBlockVector::m_Mutex, we can make a final check whether this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    bool freeOldAllocations)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;

    size_t srcBlockMinIndex = 0;
    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move: blocks are sorted from most
        // "destination" to most "source", so walk them from the back.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size, alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                strategy,
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move = {};
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                move.hAllocation = allocInfo.m_hAllocation;
                move.pSrcBlock = pSrcBlockInfo->m_pBlock;
                move.pDstBlock = pDstBlockInfo->m_pBlock;

                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);

                if(freeOldAllocations)
                {
                    pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
                    allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
                }

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in m_Allocations for next round.
        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else if(srcBlockIndex > 0)
        {
            --srcBlockIndex;
            srcAllocIndex = SIZE_MAX;
        }
        else
        {
            return VK_SUCCESS;
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();
        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks this time by the main criterium, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // Execute defragmentation rounds (the main part).
    const uint32_t roundCount = 2;
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove, !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL));
    }

    return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
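// MoveMakesSense orders positions lexicographically by (blockIndex, offset):
// moving to an earlier block always helps; within the same block, only a move
// to a lower offset does. Illustrative example: (block 2, offset 0) ->
// (block 1, offset 4096) is accepted, while any move to a later block or a
// higher offset in the same block is rejected, guaranteeing every accepted
// move compacts data toward the front of the sorted block list.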
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}
VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
14229 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
14230 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14231 VkDeviceSize maxBytesToMove,
14232 uint32_t maxAllocationsToMove,
14235 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
14237 const size_t blockCount = m_pBlockVector->GetBlockCount();
14238 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
14243 PreprocessMetadata();
14247 m_BlockInfos.resize(blockCount);
14248 for(
size_t i = 0; i < blockCount; ++i)
14250 m_BlockInfos[i].origBlockIndex = i;
14253 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
14254 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
14255 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
14260 FreeSpaceDatabase freeSpaceDb;
14262 size_t dstBlockInfoIndex = 0;
14263 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14264 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14265 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14266 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
14267 VkDeviceSize dstOffset = 0;
14270 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
14272 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
14273 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            VmaDefragmentationMove move = {};
            // Try to place it in one of free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block.
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.
                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register remaining free space at the end of dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block.
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If destination and source place overlap, skip if it would move it
                        // by only < 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;

                        move.srcBlockIndex = srcOrigBlockIndex;
                        move.dstBlockIndex = dstOrigBlockIndex;
                        move.srcOffset = srcAllocOffset;
                        move.dstOffset = dstAllocOffset;
                        move.size = srcAllocSize;

                        moves.push_back(move);
                    }
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.
                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = dstOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
        }
    }

    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
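/*
Illustrative sketch, not part of the library: a caller consuming the
VmaDefragmentationMove records produced above could record one buffer copy per
move. The `blockBuffers` array (one temporary VkBuffer bound per memory block)
is a hypothetical name introduced for this example.

    // Assumes `moves` was filled by the algorithm and that each block has a
    // temporary VkBuffer bound to its whole range for the copy.
    for(const VmaDefragmentationMove& move : moves)
    {
        VkBufferCopy region = {};
        region.srcOffset = move.srcOffset;
        region.dstOffset = move.dstOffset;
        region.size = move.size;
        vkCmdCopyBuffer(commandBuffer,
            blockBuffers[move.srcBlockIndex],
            blockBuffers[move.dstBlockIndex],
            1, &region);
    }
*/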
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        // Remove FREE suballocations - the algorithm will recreate them after compaction.
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            // pMetadata->m_SumFreeSize is already set to blockSize.
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
    // Find the first suballocation with offset >= suballoc.offset and insert before it.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end() && it->offset < suballoc.offset)
    {
        ++it;
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
    defragmentationMovesProcessed(0),
    defragmentationMovesCommitted(0),
    hasDefragmentationPlan(0),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}
VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}
void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    /*
    Fast algorithm is supported only when certain criteria are met:
    - VMA_DEBUG_MARGIN is 0.
    - All allocations in this block vector are moveable.
    - There is no possibility of image/buffer granularity conflict.
    - The defragmentation is not incremental.
    */
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
        !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL))
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}
VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, const VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with an algorithm other than the default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    const VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dedicated allocations are not defragmented.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // DedicatedAlloc cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // Lost allocation cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with an algorithm other than the default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to a default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
    {
        // For incremental defragmentation we just earmark how much we can move.
        // The real work happens in the defragmentation passes.
        m_MaxCpuBytesToMove = maxCpuBytesToMove;
        m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;

        m_MaxGpuBytesToMove = maxGpuBytesToMove;
        m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;

        if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
            m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
        {
            return VK_NOT_READY;
        }
    }
    else
    {
        if(commandBuffer == VK_NULL_HANDLE)
        {
            maxGpuBytesToMove = 0;
            maxGpuAllocationsToMove = 0;
        }
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats, flags,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats, flags,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}

VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo)
{
    VmaDefragmentationPassMoveInfo* pCurrentMove = pInfo->pMoves;
    uint32_t movesLeft = pInfo->moveCount;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                pBlockVectorCtx->GetBlockVector()->Defragment(
                    pBlockVectorCtx,
                    m_pStats, m_Flags,
                    m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                    m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                    VK_NULL_HANDLE);

                if(pBlockVectorCtx->res < VK_SUCCESS)
                    continue;

                pBlockVectorCtx->hasDefragmentationPlan = true;
            }

            const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
                pBlockVectorCtx,
                pCurrentMove, movesLeft);

            movesLeft -= processed;
            pCurrentMove += processed;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                m_pStats, m_Flags,
                m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                VK_NULL_HANDLE);

            if(pBlockVectorCtx->res < VK_SUCCESS)
                continue;

            pBlockVectorCtx->hasDefragmentationPlan = true;
        }

        const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
            pBlockVectorCtx,
            pCurrentMove, movesLeft);

        movesLeft -= processed;
        pCurrentMove += processed;
    }

    pInfo->moveCount = pInfo->moveCount - movesLeft;

    return VK_SUCCESS;
}
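/*
Illustrative sketch, not part of the library: how an application might drive the
incremental pass machinery implemented above. Assumes `allocator` and `ctx` were
created with VMA_DEFRAGMENTATION_FLAG_INCREMENTAL, and that the caller copies
the data for each returned move itself (vmaBeginDefragmentationPass is
documented to return VK_SUCCESS when nothing is left to move).

    VmaDefragmentationPassMoveInfo moves[64];
    for(;;)
    {
        VmaDefragmentationPassInfo passInfo = {};
        passInfo.moveCount = 64;
        passInfo.pMoves = moves;
        VkResult res = vmaBeginDefragmentationPass(allocator, ctx, &passInfo);
        if(res == VK_SUCCESS)
            break; // Defragmentation finished.
        // Copy passInfo.moveCount allocations to their new memory/offset here...
        vmaEndDefragmentationPass(allocator, ctx);
    }
*/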
VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
{
    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                res = VK_NOT_READY;
                continue;
            }

            pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
                pBlockVectorCtx, m_pStats);

            if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
                res = VK_NOT_READY;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            res = VK_NOT_READY;
            continue;
        }

        pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
            pBlockVectorCtx, m_pStats);

        if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
            res = VK_NOT_READY;
    }

    return res;
}
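/*
Illustrative sketch, not part of the library: the non-incremental path above is
what runs when an application uses the public one-shot API. `allocations` and
`allocationCount` are hypothetical caller-side variables.

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = (uint32_t)allocationCount;
    defragInfo.pAllocations = allocations;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
    VmaDefragmentationStats stats = {};
    vmaDefragmentationBegin(allocator, &defragInfo, &stats, &defragCtx);
    // ... destroy and re-create buffers/images bound to moved allocations ...
    vmaDefragmentationEnd(allocator, defragCtx);
*/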
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_RecordingStartTime(std::chrono::high_resolution_clock::now())
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

#if defined(_WIN32)
    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
#else
    // Open file for writing.
    m_File = fopen(settings.pFilePath, "wb");
    if(m_File == 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
#endif

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,8");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}
void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex, const VmaPoolCreateInfo& createInfo, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMapMemory(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    PrintPointerList(info.poolCount, info.pPools);
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
    Flush();
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}
void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
    Flush();
}

VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            // pUserData is an opaque pointer - print its address as a string.
            snprintf(m_PtrStr, 17, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled,
    bool deviceCoherentMemoryExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
#if defined(_WIN32)
    outParams.threadId = GetCurrentThreadId();
#else
    // Use C++11 features to get the thread id and convert it to uint32_t.
    // There is room for optimization, since stringstream is quite slow.
    std::thread::id thread_id = std::this_thread::get_id();
    std::stringstream thread_id_to_string_converter;
    thread_id_to_string_converter << thread_id;
    std::string thread_id_as_string = thread_id_to_string_converter.str();
    outParams.threadId = static_cast<uint32_t>(std::stoi(thread_id_as_string.c_str()));
#endif

    auto current_time = std::chrono::high_resolution_clock::now();

    outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
}
void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}

void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
    m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
    m_UseAmdDeviceCoherentMemory((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT) != 0),
    m_UseKhrBufferDeviceAddress((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0),
    m_GlobalMemoryTypeBits(UINT32_MAX)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        // These extensions were promoted to Vulkan 1.1, so no need to enable them explicitly.
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Needs to be a multiple of sizeof(uint32_t) because the magic value is written as uint32_t.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device && pCreateInfo->instance);

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if !(VMA_BUFFER_DEVICE_ADDRESS)
    if(m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
    }
#endif
#if VMA_VULKAN_VERSION < 1002000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0 , sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0, // minBlockCount
            SIZE_MAX, // maxBlockCount
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call CreateMinBlocks here, because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}

VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget,
            m_UseAmdDeviceCoherentMemory);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Static();
#endif

    if(pVulkanFunctions != VMA_NULL)
    {
        ImportVulkanFunctions_Custom(pVulkanFunctions);
    }

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Dynamic();
#endif

    ValidateVulkanFunctions();
}
#if VMA_STATIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Static()
{
    // Vulkan 1.0
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;

    // Vulkan 1.1
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
        m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
        m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
    }
#endif
}

#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions)
{
    VMA_ASSERT(pVulkanFunctions != VMA_NULL);

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
    VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
    VMA_COPY_IF_NOT_NULL(vkFreeMemory);
    VMA_COPY_IF_NOT_NULL(vkMapMemory);
    VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
    VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
    VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
    VMA_COPY_IF_NOT_NULL(vkCreateImage);
    VMA_COPY_IF_NOT_NULL(vkDestroyImage);
    VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif

#if VMA_MEMORY_BUDGET
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif

#undef VMA_COPY_IF_NOT_NULL
}
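/*
Illustrative sketch, not part of the library: how an application can feed its
own function pointers into the import path above, e.g. when using a loader such
as volk. Only a subset of members is shown; any member left null falls back to
static or dynamic import depending on VMA_STATIC_VULKAN_FUNCTIONS /
VMA_DYNAMIC_VULKAN_FUNCTIONS.

    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
    vulkanFunctions.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;
    vulkanFunctions.vkAllocateMemory = vkAllocateMemory;
    vulkanFunctions.vkFreeMemory = vkFreeMemory;
    // ... and so on for the remaining members ...

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.pVulkanFunctions = &vulkanFunctions;
*/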
#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
{
#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);

    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
    VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
    VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
    VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
    VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
    VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
    VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
    VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
    VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
    VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
    VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
    VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");

#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2");
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif

#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
    }
#endif

#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
    }
#endif // #if VMA_BIND_MEMORY2

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif // #if VMA_MEMORY_BUDGET

#undef VMA_FETCH_DEVICE_FUNC
#undef VMA_FETCH_INSTANCE_FUNC
}

#endif // #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
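/*
Illustrative sketch, not part of the library: selecting an import mode before
including this header in the implementation file.

    // Fetch pointers with vkGetInstanceProcAddr/vkGetDeviceProcAddr (code above)
    // instead of linking statically:
    #define VMA_STATIC_VULKAN_FUNCTIONS 0
    #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
    #define VMA_IMPLEMENTATION
    #include "vk_mem_alloc.h"
*/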
void VmaAllocator_T::ValidateVulkanFunctions()
{
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif

#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
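/*
Worked example, illustrative only: on a 256 MiB heap, which is below the default
VMA_SMALL_HEAP_MAX_SIZE of 1 GiB, the preferred block size becomes
heapSize / 8 = 32 MiB, then aligned up to 32 bytes (already aligned). On a
larger heap, m_PreferredLargeHeapBlockSize is used instead, which defaults to
VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 MiB) unless overridden via
VmaAllocatorCreateInfo::preferredLargeHeapBlockSize.
*/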
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If the memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristics: allocate dedicated memory if the requested size is greater than half of the preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        return AllocateDedicatedMemory(
            size,
            suballocType,
            memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer,
            dedicatedBufferUsage,
            dedicatedImage,
            allocationCount,
            pAllocations);
    }
    else
    {
        VkResult res = blockVector->Allocate(
            m_CurrentFrameIndex.load(),
            size,
            alignment,
            finalCreateInfo,
            suballocType,
            allocationCount,
            pAllocations);
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Block allocation failed - try dedicated memory.
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }

        res = AllocateDedicatedMemory(
            size,
            suballocType,
            memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer,
            dedicatedBufferUsage,
            dedicatedImage,
            allocationCount,
            pAllocations);
        if(res == VK_SUCCESS)
        {
            // Succeeded: AllocateDedicatedMemory already filled pAllocations.
            VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
            return VK_SUCCESS;
        }
        else
        {
            // Everything failed: return the error code.
            VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
            return res;
        }
    }
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

#if VMA_BUFFER_DEVICE_ADDRESS
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_UseKhrBufferDeviceAddress)
    {
        bool canContainBufferWithDeviceAddress = true;
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX || // Usage flags unknown
                (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = false;
        }
        if(canContainBufferWithDeviceAddress)
        {
            allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
            VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
        }
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size,
            suballocType,
            memTypeIndex,
            allocInfo,
            map,
            isUserDataString,
            pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register them in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            // No need to call vkUnmapMemory - the Vulkan spec allows skipping it before vkFreeMemory.
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If the memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfoForPool,
            suballocType,
            allocationCount,
            pAllocations);
    }
    else
    {
        // Bit mask of memory types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer,
                dedicatedBufferUsage,
                dedicatedImage,
                createInfo,
                memTypeIndex,
                suballocType,
                allocationCount,
                pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed. Try other compatible memory types.
            for(;;)
            {
                // Remove old memTypeIndex from the list of possibilities.
                memoryTypeBits &= ~(1u << memTypeIndex);
                // Find an alternative memTypeIndex.
                res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                if(res == VK_SUCCESS)
                {
                    alignmentForMemType = VMA_MAX(
                        vkMemReq.alignment,
                        GetMemoryTypeMinAlignment(memTypeIndex));

                    res = AllocateMemoryOfType(
                        vkMemReq.size,
                        alignmentForMemType,
                        requiresDedicatedAllocation || prefersDedicatedAllocation,
                        dedicatedBuffer,
                        dedicatedBufferUsage,
                        dedicatedImage,
                        createInfo,
                        memTypeIndex,
                        suballocType,
                        allocationCount,
                        pAllocations);
                    // Allocation from this alternative memory type succeeded.
                    if(res == VK_SUCCESS)
                    {
                        return res;
                    }
                    // Otherwise try the next one - next loop iteration.
                }
                // No other matching memory type index could be found.
                else
                {
                    // Not returning res, because we already failed to allocate once.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
        else
        {
            return res;
        }
    }
}
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            // Do this regardless of whether the allocation is lost - lost allocations still count in the budget.
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}
VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and so it does nothing. It's left for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // Have to take MIN with heap size because an explicit HeapSizeLimit is included in it.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
        }
    }
}
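/*
Illustrative usage sketch (not part of the library): querying the budget through the
public API before deciding whether to stream in more resources. `allocator` is assumed
to be a valid VmaAllocator and `newResourceSize` is a hypothetical placeholder.

    VmaBudget budget[VK_MAX_MEMORY_HEAPS] = {};
    vmaGetBudget(allocator, budget);
    const uint32_t heapIndex = 0; // heap of interest
    if(budget[heapIndex].usage + newResourceSize < budget[heapIndex].budget)
    {
        // It is probably safe to allocate newResourceSize more bytes from this heap.
    }
*/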
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats, info.flags);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}
VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}

VkResult VmaAllocator_T::DefragmentationPassBegin(
    VmaDefragmentationPassInfo* pInfo,
    VmaDefragmentationContext context)
{
    return context->DefragmentPassBegin(pInfo);
}

VkResult VmaAllocator_T::DefragmentationPassEnd(
    VmaDefragmentationContext context)
{
    return context->DefragmentPassEnd();
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        /*
        Warning: This is a carefully designed algorithm.
        Do not modify unless you really know what you're doing :)
        */
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif
        return true;
    }
}
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    // Memory type index out of range or forbidden.
    if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() ||
        ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
        (void)success;
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}

void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData);
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    VkResult res = VK_SUCCESS;

    VkMappedMemoryRange memRange = {};
    if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
    {
        switch(op)
        {
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
    return res;
}
VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets, const VkDeviceSize* sizes,
    VMA_CACHE_OPERATION op)
{
    typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
    typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
    RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));

    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation alloc = allocations[allocIndex];
        const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
        const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
        VkMappedMemoryRange newRange;
        if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
        {
            ranges.push_back(newRange);
        }
    }

    VkResult res = VK_SUCCESS;
    if(!ranges.empty())
    {
        switch(op)
        {
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
            break;
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
    return res;
}
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
        (void)success;
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create buffer.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // Query for supported memory types.
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // Destroy buffer.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}
uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
{
    // Make sure memory information is already fetched.
    VMA_ASSERT(GetMemoryTypeCount() > 0);

    uint32_t memoryTypeBits = UINT32_MAX;

    if(!m_UseAmdDeviceCoherentMemory)
    {
        // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD.
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
            {
                memoryTypeBits &= ~(1u << memTypeIndex);
            }
        }
    }

    return memoryTypeBits;
}
bool VmaAllocator_T::GetFlushOrInvalidateRange(
    VmaAllocation allocation,
    VkDeviceSize offset, VkDeviceSize size,
    VkMappedMemoryRange& outRange) const
{
    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        const VkDeviceSize allocationSize = allocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
        outRange.pNext = VMA_NULL;
        outRange.memory = allocation->GetMemory();

        switch(allocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                outRange.size = allocationSize - outRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                outRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
                    allocationSize - outRange.offset);
            }
            break;
        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
            {
                // 1. Still within this allocation.
                outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
                if(size == VK_WHOLE_SIZE)
                {
                    size = allocationSize - offset;
                }
                else
                {
                    VMA_ASSERT(offset + size <= allocationSize);
                }
                outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);

                // 2. Adjust to whole block.
                const VkDeviceSize allocationOffset = allocation->GetOffset();
                VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
                const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
                outRange.offset += allocationOffset;
                outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);

                break;
            }
        default:
            VMA_ASSERT(0);
        }
        return true;
    }
    return false;
}
#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    VmaPnextChainPushFront(&memProps, &budgetProps);

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();

            // Some bugged drivers return a budget of 0 or one much bigger than the heap size.
            if(m_Budget.m_VulkanBudget[heapIndex] == 0)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
            }
            else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
            }
            if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
            {
                m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
            }
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
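// Note: The function above lazily caches the result of an expensive query in an atomic.
// If several threads race here, each computes the same deterministic value and the
// redundant stores are benign, so no stronger synchronization is needed.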
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_ASSERT(pCreateInfo->vulkanApiVersion == 0 ||
        (VK_VERSION_MAJOR(pCreateInfo->vulkanApiVersion) == 1 && VK_VERSION_MINOR(pCreateInfo->vulkanApiVersion) <= 2));
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo)
{
    VMA_ASSERT(allocator && pAllocatorInfo);
    pAllocatorInfo->instance = allocator->m_hInstance;
    pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice();
    pAllocatorInfo->device = allocator->m_hDevice;
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}
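/*
Illustrative usage sketch (not part of the library): advancing the frame index once per
rendered frame so lost-allocation tracking and the budget refresh stay current.
`allocator` and `frameCounter` are assumed to exist in the application.

    void OnFrameBegin(VmaAllocator allocator, uint32_t& frameCounter)
    {
        ++frameCounter;
        vmaSetCurrentFrameIndex(allocator, frameCounter);
    }
*/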
VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
                    {
                        json.WriteString("PROTECTED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_UNCACHED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
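/*
Illustrative usage sketch (not part of the library): dumping the JSON statistics,
assuming a valid `allocator`. The returned string must be released with
vmaFreeStatsString.

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = include detailed map
    printf("%s\n", statsString);
    vmaFreeStatsString(allocator, statsString);
*/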
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    // Avoid DEVICE_COHERENT unless explicitly requested.
    if(((pAllocationCreateInfo->requiredFlags | pAllocationCreateInfo->preferredFlags) &
        (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
    {
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to the memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains all requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Calculate cost as number of bits from preferredFlags not present in this memory type,
                // plus the number of notPreferredFlags that are present.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
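/*
Illustrative usage sketch (not part of the library): picking a memory type for a staging
buffer, assuming a valid `allocator`. Passing UINT32_MAX as memoryTypeBits means no
restriction beyond the allocator's own global mask.

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndex(allocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
    // On VK_SUCCESS, memTypeIndex is the cheapest type that is HOST_VISIBLE | HOST_COHERENT.
*/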
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}
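/*
Illustrative usage sketch (not part of the library): using the function above to set up
a custom pool for uniform buffers, assuming a valid `allocator`.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    VmaPool pool = VK_NULL_HANDLE;
    vmaCreatePool(allocator, &poolCreateInfo, &pool);
*/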
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool && ppName);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}
VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}
VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
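/*
Illustrative usage sketch (not part of the library): uploading data through a temporary
mapping, assuming `allocator`, an allocation `alloc` in HOST_VISIBLE memory, and a source
buffer `srcData`/`srcSize`. The flush is effectively a no-op on HOST_COHERENT memory types.

    void* mapped = VMA_NULL;
    if(vmaMapMemory(allocator, alloc, &mapped) == VK_SUCCESS)
    {
        memcpy(mapped, srcData, (size_t)srcSize);
        vmaFlushAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
        vmaUnmapMemory(allocator, alloc);
    }
*/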
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(
    VmaAllocator allocator,
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets,
    const VkDeviceSize* sizes)
{
    VMA_ASSERT(allocator);

    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocations);

    VMA_DEBUG_LOG("vmaFlushAllocations");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        //TODO
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(
    VmaAllocator allocator,
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets,
    const VkDeviceSize* sizes)
{
    VMA_ASSERT(allocator);

    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocations);

    VMA_DEBUG_LOG("vmaInvalidateAllocations");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        //TODO
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
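/*
Illustrative usage sketch (not part of the library): periodically validating the margins
around allocations. This only detects anything when the implementation is compiled with
VMA_DEBUG_MARGIN and VMA_DEBUG_DETECT_CORRUPTION enabled; otherwise it returns
VK_ERROR_FEATURE_NOT_PRESENT.

    VkResult res = vmaCheckCorruption(allocator, UINT32_MAX); // UINT32_MAX = all memory types
    assert(res == VK_SUCCESS || res == VK_ERROR_FEATURE_NOT_PRESENT);
*/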
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using the new one.

    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // info2.flags, maxGpuAllocationsToMove, maxGpuBytesToMove, commandBuffer deliberately left zero.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    VMA_ASSERT(pInfo->allocationCount == 0 || pInfo->pAllocations != VMA_NULL);
    VMA_ASSERT(pInfo->poolCount == 0 || pInfo->pPools != VMA_NULL);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context,
    VmaDefragmentationPassInfo* pInfo)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(pInfo);

    VMA_DEBUG_LOG("vmaBeginDefragmentationPass");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        pInfo->moveCount = 0;
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassBegin(pInfo, context);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaEndDefragmentationPass");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassEnd(context);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
        !allocator->m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            pBufferCreateInfo->usage, // dedicatedBufferUsage
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
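/*
Illustrative usage sketch (not part of the library): creating a device-local vertex
buffer with a single call, assuming a valid `allocator`.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
        &buffer, &allocation, VMA_NULL);
    // ... use the buffer ...
    vmaDestroyBuffer(allocator, buffer, allocation);
*/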
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            UINT32_MAX, // dedicatedBufferUsage
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
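/*
Illustrative usage sketch (not part of the library): creating a sampled 2D texture,
assuming a valid `allocator`.

    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage image = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &image, &allocation, VMA_NULL);
    // ... use the image ...
    vmaDestroyImage(allocator, image, allocation);
*/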
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION