23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
2003 #if VMA_RECORDING_ENABLED
2006 #include <windows.h>
2021 #ifndef VMA_RECORDING_ENABLED
2022 #define VMA_RECORDING_ENABLED 0
2026 #define NOMINMAX // For windows.h
2029 #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
2030 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
2031 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
2032 extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
2033 extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
2034 extern PFN_vkAllocateMemory vkAllocateMemory;
2035 extern PFN_vkFreeMemory vkFreeMemory;
2036 extern PFN_vkMapMemory vkMapMemory;
2037 extern PFN_vkUnmapMemory vkUnmapMemory;
2038 extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
2039 extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
2040 extern PFN_vkBindBufferMemory vkBindBufferMemory;
2041 extern PFN_vkBindImageMemory vkBindImageMemory;
2042 extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
2043 extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
2044 extern PFN_vkCreateBuffer vkCreateBuffer;
2045 extern PFN_vkDestroyBuffer vkDestroyBuffer;
2046 extern PFN_vkCreateImage vkCreateImage;
2047 extern PFN_vkDestroyImage vkDestroyImage;
2048 extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
2049 #if VMA_VULKAN_VERSION >= 1001000
2050 extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
2051 extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
2052 extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
2053 extern PFN_vkBindImageMemory2 vkBindImageMemory2;
2054 extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
2055 #endif // #if VMA_VULKAN_VERSION >= 1001000
2056 #endif // #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
2059 #include <vulkan/vulkan.h>
2065 #if !defined(VMA_VULKAN_VERSION)
2066 #if defined(VK_VERSION_1_2)
2067 #define VMA_VULKAN_VERSION 1002000
2068 #elif defined(VK_VERSION_1_1)
2069 #define VMA_VULKAN_VERSION 1001000
2071 #define VMA_VULKAN_VERSION 1000000
2075 #if !defined(VMA_DEDICATED_ALLOCATION)
2076 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
2077 #define VMA_DEDICATED_ALLOCATION 1
2079 #define VMA_DEDICATED_ALLOCATION 0
2083 #if !defined(VMA_BIND_MEMORY2)
2084 #if VK_KHR_bind_memory2
2085 #define VMA_BIND_MEMORY2 1
2087 #define VMA_BIND_MEMORY2 0
2091 #if !defined(VMA_MEMORY_BUDGET)
2092 #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
2093 #define VMA_MEMORY_BUDGET 1
2095 #define VMA_MEMORY_BUDGET 0
2100 #if !defined(VMA_BUFFER_DEVICE_ADDRESS)
2101 #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
2102 #define VMA_BUFFER_DEVICE_ADDRESS 1
2104 #define VMA_BUFFER_DEVICE_ADDRESS 0
2113 #ifndef VMA_CALL_PRE
2114 #define VMA_CALL_PRE
2116 #ifndef VMA_CALL_POST
2117 #define VMA_CALL_POST
2131 #ifndef VMA_LEN_IF_NOT_NULL
2132 #define VMA_LEN_IF_NOT_NULL(len)
2137 #ifndef VMA_NULLABLE
2139 #define VMA_NULLABLE _Nullable
2141 #define VMA_NULLABLE
2147 #ifndef VMA_NOT_NULL
2149 #define VMA_NOT_NULL _Nonnull
2151 #define VMA_NOT_NULL
2157 #ifndef VMA_NOT_NULL_NON_DISPATCHABLE
2158 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2159 #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
2161 #define VMA_NOT_NULL_NON_DISPATCHABLE
2165 #ifndef VMA_NULLABLE_NON_DISPATCHABLE
2166 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2167 #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
2169 #define VMA_NULLABLE_NON_DISPATCHABLE
2187 uint32_t memoryType,
2188 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2190 void* VMA_NULLABLE pUserData);
2194 uint32_t memoryType,
2195 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2197 void* VMA_NULLABLE pUserData);
2337 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
2338 PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
2339 PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
2341 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
2342 PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
2343 PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
2345 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
2346 PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
2436 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pHeapSizeLimit;
2509 const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);
2517 const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);
2527 uint32_t memoryTypeIndex,
2528 VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);
2540 uint32_t frameIndex);
2636 #ifndef VMA_STATS_STRING_ENABLED
2637 #define VMA_STATS_STRING_ENABLED 1
2640 #if VMA_STATS_STRING_ENABLED
2647 char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
2648 VkBool32 detailedMap);
2652 char* VMA_NULLABLE pStatsString);
2654 #endif // #if VMA_STATS_STRING_ENABLED
2906 uint32_t memoryTypeBits,
2908 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2924 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
2926 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2942 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
2944 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
3088 VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);
3116 size_t* VMA_NULLABLE pLostAllocationCount);
3143 const char* VMA_NULLABLE * VMA_NOT_NULL ppName);
3153 const char* VMA_NULLABLE pName);
3242 const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
3268 const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
3270 size_t allocationCount,
3271 VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3272 VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);
3282 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3290 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3315 size_t allocationCount,
3316 const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);
3328 VkDeviceSize newSize);
3385 void* VMA_NULLABLE pUserData);
3442 void* VMA_NULLABLE * VMA_NOT_NULL ppData);
3480 VkDeviceSize offset,
3507 VkDeviceSize offset,
3526 uint32_t allocationCount,
3527 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3528 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3529 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3547 uint32_t allocationCount,
3548 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3549 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3550 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3629 const VmaPool VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(poolCount) pPools;
3663 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory;
3801 const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3802 size_t allocationCount,
3803 VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
3822 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);
3837 VkDeviceSize allocationLocalOffset,
3838 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3839 const void* VMA_NULLABLE pNext);
3856 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);
3871 VkDeviceSize allocationLocalOffset,
3872 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3873 const void* VMA_NULLABLE pNext);
3903 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
3905 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
3922 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
3928 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
3930 VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
3947 VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
3954 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
3957 #if defined(__cplusplus) && defined(__INTELLISENSE__)
3958 #define VMA_IMPLEMENTATION
3961 #ifdef VMA_IMPLEMENTATION
3962 #undef VMA_IMPLEMENTATION
3982 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
3983 #define VMA_STATIC_VULKAN_FUNCTIONS 1
3992 #if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
3993 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
4005 #if VMA_USE_STL_CONTAINERS
4006 #define VMA_USE_STL_VECTOR 1
4007 #define VMA_USE_STL_UNORDERED_MAP 1
4008 #define VMA_USE_STL_LIST 1
4011 #ifndef VMA_USE_STL_SHARED_MUTEX
4013 #if __cplusplus >= 201703L
4014 #define VMA_USE_STL_SHARED_MUTEX 1
4018 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
4019 #define VMA_USE_STL_SHARED_MUTEX 1
4021 #define VMA_USE_STL_SHARED_MUTEX 0
4029 #if VMA_USE_STL_VECTOR
4033 #if VMA_USE_STL_UNORDERED_MAP
4034 #include <unordered_map>
4037 #if VMA_USE_STL_LIST
4046 #include <algorithm>
4051 #define VMA_NULL nullptr
4054 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
4056 void *vma_aligned_alloc(size_t alignment, size_t size)
4059 if(alignment < sizeof(void*))
4061 alignment = sizeof(void*);
4064 return memalign(alignment, size);
4066 #elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
4069 #if defined(__APPLE__)
4070 #include <AvailabilityMacros.h>
4073 void *vma_aligned_alloc(size_t alignment, size_t size)
4075 #if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0))
4076 #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0
4083 if (__builtin_available(macOS 10.15, iOS 13, *))
4084 return aligned_alloc(alignment, size);
4088 if(alignment < sizeof(void*))
4090 alignment = sizeof(void*);
4094 if(posix_memalign(&pointer, alignment, size) == 0)
4098 #elif defined(_WIN32)
4099 void *vma_aligned_alloc(size_t alignment, size_t size)
4101 return _aligned_malloc(size, alignment);
4104 void *vma_aligned_alloc(size_t alignment, size_t size)
4106 return aligned_alloc(alignment, size);
4118 #define VMA_ASSERT(expr)
4120 #define VMA_ASSERT(expr) assert(expr)
4126 #ifndef VMA_HEAVY_ASSERT
4128 #define VMA_HEAVY_ASSERT(expr)
4130 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
4134 #ifndef VMA_ALIGN_OF
4135 #define VMA_ALIGN_OF(type) (__alignof(type))
4138 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
4139 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
4142 #ifndef VMA_SYSTEM_FREE
4144 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
4146 #define VMA_SYSTEM_FREE(ptr) free(ptr)
4151 #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
4155 #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
4159 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
4163 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
4166 #ifndef VMA_DEBUG_LOG
4167 #define VMA_DEBUG_LOG(format, ...)
4177 #if VMA_STATS_STRING_ENABLED
4178 static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
4180 snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
4182 static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
4184 snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
4186 static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
4188 snprintf(outStr, strLen, "%p", ptr);
4196 void Lock() { m_Mutex.lock(); }
4197 void Unlock() { m_Mutex.unlock(); }
4198 bool TryLock() { return m_Mutex.try_lock(); }
4202 #define VMA_MUTEX VmaMutex
4206 #ifndef VMA_RW_MUTEX
4207 #if VMA_USE_STL_SHARED_MUTEX
4209 #include <shared_mutex>
4213 void LockRead() { m_Mutex.lock_shared(); }
4214 void UnlockRead() { m_Mutex.unlock_shared(); }
4215 bool TryLockRead() { return m_Mutex.try_lock_shared(); }
4216 void LockWrite() { m_Mutex.lock(); }
4217 void UnlockWrite() { m_Mutex.unlock(); }
4218 bool TryLockWrite() { return m_Mutex.try_lock(); }
4220 std::shared_mutex m_Mutex;
4222 #define VMA_RW_MUTEX VmaRWMutex
4223 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
4229 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
4230 void LockRead() { AcquireSRWLockShared(&m_Lock); }
4231 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
4232 bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
4233 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
4234 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
4235 bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
4239 #define VMA_RW_MUTEX VmaRWMutex
4245 void LockRead() { m_Mutex.Lock(); }
4246 void UnlockRead() { m_Mutex.Unlock(); }
4247 bool TryLockRead() { return m_Mutex.TryLock(); }
4248 void LockWrite() { m_Mutex.Lock(); }
4249 void UnlockWrite() { m_Mutex.Unlock(); }
4250 bool TryLockWrite() { return m_Mutex.TryLock(); }
4254 #define VMA_RW_MUTEX VmaRWMutex
4255 #endif // #if VMA_USE_STL_SHARED_MUTEX
4256 #endif // #ifndef VMA_RW_MUTEX
4261 #ifndef VMA_ATOMIC_UINT32
4263 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
4266 #ifndef VMA_ATOMIC_UINT64
4268 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
4271 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
4276 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
4279 #ifndef VMA_DEBUG_ALIGNMENT
4284 #define VMA_DEBUG_ALIGNMENT (1)
4287 #ifndef VMA_DEBUG_MARGIN
4292 #define VMA_DEBUG_MARGIN (0)
4295 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
4300 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
4303 #ifndef VMA_DEBUG_DETECT_CORRUPTION
4309 #define VMA_DEBUG_DETECT_CORRUPTION (0)
4312 #ifndef VMA_DEBUG_GLOBAL_MUTEX
4317 #define VMA_DEBUG_GLOBAL_MUTEX (0)
4320 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
4325 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
4328 #ifndef VMA_SMALL_HEAP_MAX_SIZE
4329 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
4333 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
4334 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
4338 #ifndef VMA_CLASS_NO_COPY
4339 #define VMA_CLASS_NO_COPY(className) \
4341 className(const className&) = delete; \
4342 className& operator=(const className&) = delete;
4345 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
4348 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
4350 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
4351 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
4359 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
4360 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
4361 static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
4363 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
4365 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
4366 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
4369 static inline uint32_t VmaCountBitsSet(uint32_t v)
4371 uint32_t c = v - ((v >> 1) & 0x55555555);
4372 c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
4373 c = ((c >> 4) + c) & 0x0F0F0F0F;
4374 c = ((c >> 8) + c) & 0x00FF00FF;
4375 c = ((c >> 16) + c) & 0x0000FFFF;
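// Illustrative sketch (not part of the original header): VmaCountBitsSet is
// the classic SWAR popcount, folding adjacent 1-, 2-, 4-, 8- and 16-bit
// groups into wider sums. A naive bit loop must agree with it for any input:
static inline bool VmaCountBitsSetMatchesNaive(uint32_t v)
{
    uint32_t naive = 0;
    for(uint32_t x = v; x != 0; x >>= 1)
        naive += x & 1u; // count the lowest bit, then shift it out
    return naive == VmaCountBitsSet(v); // e.g. both give 16 for 0xF0F0F0F0
}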
4384 template <typename T>
4385 inline bool VmaIsPow2(T x)
4387 return (x & (x-1)) == 0;
4392 template <typename T>
4393 static inline T VmaAlignUp(T val, T alignment)
4395 VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
4396 return (val + alignment - 1) & ~(alignment - 1);
4400 template <typename T>
4401 static inline T VmaAlignDown(T val, T alignment)
4403 VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
4404 return val & ~(alignment - 1);
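// Illustrative sketch (values assumed): for power-of-2 alignments the bit
// masks above act as integer rounding.
static inline void VmaAlignExample()
{
    VkDeviceSize up = VmaAlignUp<VkDeviceSize>(13, 8);     // 16
    VkDeviceSize down = VmaAlignDown<VkDeviceSize>(13, 8); // 8
    (void)up; (void)down;
}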
4408 template <typename T>
4409 static inline T VmaRoundDiv(T x, T y)
4411 return (x + (y / (T)2)) / y;
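// Illustrative sketch (values assumed): VmaRoundDiv rounds to nearest rather
// than truncating, by adding half the divisor first.
static inline void VmaRoundDivExample()
{
    uint32_t truncated = 7u / 2u;                     // 3
    uint32_t rounded = VmaRoundDiv<uint32_t>(7u, 2u); // 4
    (void)truncated; (void)rounded;
}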
4415 static inline uint32_t VmaNextPow2(uint32_t v)
4426 static inline uint64_t VmaNextPow2(uint64_t v)
4440 static inline uint32_t VmaPrevPow2(uint32_t v)
4450 static inline uint64_t VmaPrevPow2(uint64_t v)
4462 static inline bool VmaStrIsEmpty(const char* pStr)
4464 return pStr == VMA_NULL || *pStr == '\0';
4467 #if VMA_STATS_STRING_ENABLED
4469 static const char* VmaAlgorithmToStr(uint32_t algorithm)
4485 #endif // #if VMA_STATS_STRING_ENABLED
4489 template<typename Iterator, typename Compare>
4490 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
4492 Iterator centerValue = end; --centerValue;
4493 Iterator insertIndex = beg;
4494 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
4496 if(cmp(*memTypeIndex, *centerValue))
4498 if(insertIndex != memTypeIndex)
4500 VMA_SWAP(*memTypeIndex, *insertIndex);
4505 if(insertIndex != centerValue)
4507 VMA_SWAP(*insertIndex, *centerValue);
4512 template<typename Iterator, typename Compare>
4513 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
4517 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
4518 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
4519 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
4523 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
4525 #endif // #ifndef VMA_SORT
4534 static inline bool VmaBlocksOnSamePage(
4535 VkDeviceSize resourceAOffset,
4536 VkDeviceSize resourceASize,
4537 VkDeviceSize resourceBOffset,
4538 VkDeviceSize pageSize)
4540 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
4541 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
4542 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
4543 VkDeviceSize resourceBStart = resourceBOffset;
4544 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
4545 return resourceAEndPage == resourceBStartPage;
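// Illustrative sketch with assumed values: with a 4 KiB granularity page, a
// resource ending at byte 4095 and one starting at byte 4096 land on
// different pages, so no granularity padding is needed between them.
static inline void VmaBlocksOnSamePageExample()
{
    bool samePage = VmaBlocksOnSamePage(0, 4096, 4096, 4096);  // false: B starts page 1
    bool overlapRisk = VmaBlocksOnSamePage(0, 100, 200, 4096); // true: both on page 0
    (void)samePage; (void)overlapRisk;
}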
4548 enum VmaSuballocationType
4550 VMA_SUBALLOCATION_TYPE_FREE = 0,
4551 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
4552 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
4553 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
4554 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
4555 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
4556 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
4565 static inline bool VmaIsBufferImageGranularityConflict(
4566 VmaSuballocationType suballocType1,
4567 VmaSuballocationType suballocType2)
4569 if(suballocType1 > suballocType2)
4571 VMA_SWAP(suballocType1, suballocType2);
4574 switch(suballocType1)
4576 case VMA_SUBALLOCATION_TYPE_FREE:
4578 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
4580 case VMA_SUBALLOCATION_TYPE_BUFFER:
4582 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4583 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4584 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
4586 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4587 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
4588 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4589 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
4591 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4592 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
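// Illustrative sketch, assuming the enum above: callers may pass the pair in
// either order because the function first swaps so suballocType1 <= suballocType2.
static inline void VmaGranularityConflictExample()
{
    // A buffer next to an optimally tiled image may share a granularity page:
    bool conflict = VmaIsBufferImageGranularityConflict(
        VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL, VMA_SUBALLOCATION_TYPE_BUFFER); // true
    // Two buffers never conflict:
    bool none = VmaIsBufferImageGranularityConflict(
        VMA_SUBALLOCATION_TYPE_BUFFER, VMA_SUBALLOCATION_TYPE_BUFFER); // false
    (void)conflict; (void)none;
}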
4600 static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
4602 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4603 uint32_t* pDst = (uint32_t*)((char*)pData + offset);
4604 const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
4605 for(size_t i = 0; i < numberCount; ++i, ++pDst)
4607 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
4614 static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
4616 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4617 const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
4618 const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
4619 for(size_t i = 0; i < numberCount; ++i, ++pSrc)
4621 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
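// Illustrative configuration sketch (values assumed): with both switches on,
// every allocation is bracketed by VMA_DEBUG_MARGIN bytes filled with
// VMA_CORRUPTION_DETECTION_MAGIC_VALUE by VmaWriteMagicValue(); a later
// VmaValidateMagicValue() returning false means a neighbor wrote out of bounds.
//
//     #define VMA_DEBUG_MARGIN 16
//     #define VMA_DEBUG_DETECT_CORRUPTION 1
//     #include "vk_mem_alloc.h"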
4634 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
4636 memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
4637 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4638 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4639 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
4645 VMA_CLASS_NO_COPY(VmaMutexLock)
4647 VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
4648 m_pMutex(useMutex ? &mutex : VMA_NULL)
4649 { if(m_pMutex) { m_pMutex->Lock(); } }
4651 { if(m_pMutex) { m_pMutex->Unlock(); } }
4653 VMA_MUTEX* m_pMutex;
4657 struct VmaMutexLockRead
4659 VMA_CLASS_NO_COPY(VmaMutexLockRead)
4661 VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
4662 m_pMutex(useMutex ? &mutex : VMA_NULL)
4663 { if(m_pMutex) { m_pMutex->LockRead(); } }
4664 ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
4666 VMA_RW_MUTEX* m_pMutex;
4670 struct VmaMutexLockWrite
4672 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
4674 VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
4675 m_pMutex(useMutex ? &mutex : VMA_NULL)
4676 { if(m_pMutex) { m_pMutex->LockWrite(); } }
4677 ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
4679 VMA_RW_MUTEX* m_pMutex;
4682 #if VMA_DEBUG_GLOBAL_MUTEX
4683 static VMA_MUTEX gDebugGlobalMutex;
4684 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
4686 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
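// Illustrative sketch (helper name assumed): the structs above are RAII
// guards, so shared state is typically read under a scoped read lock:
static inline void VmaReadLockExample(VMA_RW_MUTEX& mutex, bool useMutex)
{
    VmaMutexLockRead lock(mutex, useMutex); // LockRead() here, UnlockRead() at scope exit
    // ... read the protected structure ...
}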
4690 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
4701 template <typename CmpLess, typename IterT, typename KeyT>
4702 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
4704 size_t down = 0, up = (end - beg);
4707 const size_t mid = (down + up) / 2;
4708 if(cmp(*(beg+mid), key))
4720 template<typename CmpLess, typename IterT, typename KeyT>
4721 IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
4723 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4724 beg, end, value, cmp);
4726 (!cmp(*it, value) && !cmp(value, *it)))
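// Illustrative sketch (data assumed): VmaBinaryFindFirstNotLess is a
// lower_bound; VmaBinaryFindSorted turns it into an exact-match find by
// testing equivalence with two cmp() calls, since "a < b" and "b < a" both
// false means equal under a strict weak ordering.
static inline void VmaBinaryFindExample()
{
    const int sorted[] = { 1, 3, 3, 7 };
    const int* p = VmaBinaryFindFirstNotLess(
        sorted, sorted + 4, 3, [](int a, int b) { return a < b; });
    // p points at the first 3 (index 1); for key 4 it would point at 7.
    (void)p;
}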
4738 template<typename T>
4739 static bool VmaValidatePointerArray(uint32_t count, const T* arr)
4741 for(uint32_t i = 0; i < count; ++i)
4743 const T iPtr = arr[i];
4744 if(iPtr == VMA_NULL)
4748 for(uint32_t j = i + 1; j < count; ++j)
4759 template<typename MainT, typename NewT>
4760 static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
4762 newStruct->pNext = mainStruct->pNext;
4763 mainStruct->pNext = newStruct;
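// Illustrative sketch, assuming Vulkan 1.1 structs: pushing onto a pNext
// chain is O(1) because the new struct becomes the first extension.
static inline void VmaPnextChainExample(VkMemoryRequirements2& memReq2,
    VkMemoryDedicatedRequirements& dedicatedReq)
{
    VmaPnextChainPushFront(&memReq2, &dedicatedReq);
    // Now memReq2.pNext == &dedicatedReq, and dedicatedReq.pNext holds
    // whatever the chain pointed to before.
}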
4769 static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
4771 void* result = VMA_NULL;
4772 if((pAllocationCallbacks != VMA_NULL) &&
4773 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
4775 result = (*pAllocationCallbacks->pfnAllocation)(
4776 pAllocationCallbacks->pUserData,
4779 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
4783 result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
4785 VMA_ASSERT(result != VMA_NULL && "CPU memory allocation failed.");
4789 static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
4791 if((pAllocationCallbacks != VMA_NULL) &&
4792 (pAllocationCallbacks->pfnFree != VMA_NULL))
4794 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
4798 VMA_SYSTEM_FREE(ptr);
4802 template<typename T>
4803 static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
4805 return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
4808 template<typename T>
4809 static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
4811 return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
4814 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
4816 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
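// Illustrative sketch (payload type assumed): vma_new places the object into
// CPU memory obtained through VkAllocationCallbacks, and vma_delete runs the
// destructor before returning the memory via VmaFree.
struct VmaExamplePayload { uint32_t value; VmaExamplePayload(uint32_t v) : value(v) {} };
static inline void VmaNewDeleteExample(const VkAllocationCallbacks* pCallbacks)
{
    VmaExamplePayload* p = vma_new(pCallbacks, VmaExamplePayload)(42u);
    vma_delete(pCallbacks, p);
}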
4818 template<typename T>
4819 static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
4822 VmaFree(pAllocationCallbacks, ptr);
4825 template<typename T>
4826 static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
4830 for(size_t i = count; i--; )
4834 VmaFree(pAllocationCallbacks, ptr);
4838 static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
4840 if(srcStr != VMA_NULL)
4842 const size_t len = strlen(srcStr);
4843 char* const result = vma_new_array(allocs, char, len + 1);
4844 memcpy(result, srcStr, len + 1);
4853 static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
4857 const size_t len = strlen(str);
4858 vma_delete_array(allocs, str, len + 1);
4863 template<typename T>
4864 class VmaStlAllocator
4867 const VkAllocationCallbacks* const m_pCallbacks;
4868 typedef T value_type;
4870 VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
4871 template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
4873 T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
4874 void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }
4876 template<typename U>
4877 bool operator==(const VmaStlAllocator<U>& rhs) const
4879 return m_pCallbacks == rhs.m_pCallbacks;
4881 template<typename U>
4882 bool operator!=(const VmaStlAllocator<U>& rhs) const
4884 return m_pCallbacks != rhs.m_pCallbacks;
4887 VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
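// Note (editorial sketch): VmaStlAllocator implements just enough of the C++
// Allocator requirements (value_type, allocate, deallocate, equality) to back
// the VmaVector/VmaList/VmaMap containers defined below, e.g.
//     VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> >
// so every internal container honors the user's VkAllocationCallbacks.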
4890 #if VMA_USE_STL_VECTOR
4892 #define VmaVector std::vector
4894 template<typename T, typename allocatorT>
4895 static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
4897 vec.insert(vec.begin() + index, item);
4900 template<typename T, typename allocatorT>
4901 static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
4903 vec.erase(vec.begin() + index);
4906 #else // #if VMA_USE_STL_VECTOR
4911 template<typename T, typename AllocatorT>
4915 typedef T value_type;
4917 VmaVector(const AllocatorT& allocator) :
4918 m_Allocator(allocator),
4925 VmaVector(size_t count, const AllocatorT& allocator) :
4926 m_Allocator(allocator),
4927 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
4935 VmaVector(size_t count, const T& value, const AllocatorT& allocator)
4936 : VmaVector(count, allocator) {}
4938 VmaVector(const VmaVector<T, AllocatorT>& src) :
4939 m_Allocator(src.m_Allocator),
4940 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4941 m_Count(src.m_Count),
4942 m_Capacity(src.m_Count)
4946 memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
4952 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4955 VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
4959 resize(rhs.m_Count);
4962 memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
4968 bool empty() const { return m_Count == 0; }
4969 size_t size() const { return m_Count; }
4970 T* data() { return m_pArray; }
4971 const T* data() const { return m_pArray; }
4973 T& operator[](size_t index)
4975 VMA_HEAVY_ASSERT(index < m_Count);
4976 return m_pArray[index];
4978 const T& operator[](size_t index) const
4980 VMA_HEAVY_ASSERT(index < m_Count);
4981 return m_pArray[index];
4986 VMA_HEAVY_ASSERT(m_Count > 0);
4989 const T& front() const
4991 VMA_HEAVY_ASSERT(m_Count > 0);
4996 VMA_HEAVY_ASSERT(m_Count > 0);
4997 return m_pArray[m_Count - 1];
4999 const T& back() const
5001 VMA_HEAVY_ASSERT(m_Count > 0);
5002 return m_pArray[m_Count - 1];
5005 void reserve(size_t newCapacity, bool freeMemory = false)
5007 newCapacity = VMA_MAX(newCapacity, m_Count);
5009 if((newCapacity < m_Capacity) && !freeMemory)
5011 newCapacity = m_Capacity;
5014 if(newCapacity != m_Capacity)
5016 T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
5019 memcpy(newArray, m_pArray, m_Count * sizeof(T));
5021 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5022 m_Capacity = newCapacity;
5023 m_pArray = newArray;
5027 void resize(size_t newCount, bool freeMemory = false)
5029 size_t newCapacity = m_Capacity;
5030 if(newCount > m_Capacity)
5032 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
5036 newCapacity = newCount;
5039 if(newCapacity != m_Capacity)
5041 T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
5042 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
5043 if(elementsToCopy != 0)
5045 memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
5047 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5048 m_Capacity = newCapacity;
5049 m_pArray = newArray;
5055 void clear(bool freeMemory = false)
5057 resize(0, freeMemory);
5060 void insert(size_t index, const T& src)
5062 VMA_HEAVY_ASSERT(index <= m_Count);
5063 const size_t oldCount = size();
5064 resize(oldCount + 1);
5065 if(index < oldCount)
5067 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
5069 m_pArray[index] = src;
5072 void remove(size_t index)
5074 VMA_HEAVY_ASSERT(index < m_Count);
5075 const size_t oldCount = size();
5076 if(index < oldCount - 1)
5078 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
5080 resize(oldCount - 1);
5083 void push_back(const T& src)
5085 const size_t newIndex = size();
5086 resize(newIndex + 1);
5087 m_pArray[newIndex] = src;
5092 VMA_HEAVY_ASSERT(m_Count > 0);
5096 void push_front(const T& src)
5103 VMA_HEAVY_ASSERT(m_Count > 0);
5107 typedef T* iterator;
5109 iterator begin() { return m_pArray; }
5110 iterator end() { return m_pArray + m_Count; }
5113 AllocatorT m_Allocator;
5119 template<typename T, typename allocatorT>
5120 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
5122 vec.insert(index, item);
5125 template<typename T, typename allocatorT>
5126 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
5131 #endif // #if VMA_USE_STL_VECTOR
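// Illustrative sketch (assumes the shims above): VmaVectorInsert/Remove give
// std::vector and the custom VmaVector one call-site-compatible surface.
static inline void VmaVectorShimExample(const VkAllocationCallbacks* pCallbacks)
{
    VmaVector<int, VmaStlAllocator<int> > v(VmaStlAllocator<int>(pCallbacks));
    v.push_back(42);
    VmaVectorInsert(v, 0, 7); // same call works for either container
    VmaVectorRemove(v, 1);    // v now holds { 7 }
}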
5133 template<typename CmpLess, typename VectorT>
5134 size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
5136 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
5138 vector.data() + vector.size(),
5140 CmpLess()) - vector.data();
5141 VmaVectorInsert(vector, indexToInsert, value);
5142 return indexToInsert;
5145 template<typename CmpLess, typename VectorT>
5146 bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
5149 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
5154 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
5156 size_t indexToRemove = it - vector.begin();
5157 VmaVectorRemove(vector, indexToRemove);
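// Illustrative sketch (comparator and values assumed): these helpers keep a
// vector permanently sorted, so lookups stay O(log n) via the same
// lower-bound search.
struct VmaExampleIntLess { bool operator()(int a, int b) const { return a < b; } };
static inline void VmaSortedVectorExample(const VkAllocationCallbacks* pCallbacks)
{
    VmaVector<int, VmaStlAllocator<int> > v(VmaStlAllocator<int>(pCallbacks));
    VmaVectorInsertSorted<VmaExampleIntLess>(v, 5);
    VmaVectorInsertSorted<VmaExampleIntLess>(v, 2); // v == { 2, 5 }
    bool removed = VmaVectorRemoveSorted<VmaExampleIntLess>(v, 5); // true, v == { 2 }
    (void)removed;
}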
5174 template<typename T, typename AllocatorT, size_t N>
5175 class VmaSmallVector
5178 typedef T value_type;
5180 VmaSmallVector(const AllocatorT& allocator) :
5182 m_DynamicArray(allocator)
5185 VmaSmallVector(size_t count, const AllocatorT& allocator) :
5187 m_DynamicArray(count > N ? count : 0, allocator)
5190 template<typename SrcT, typename SrcAllocatorT, size_t SrcN>
5191 VmaSmallVector(const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) = delete;
5192 template<typename SrcT, typename SrcAllocatorT, size_t SrcN>
5193 VmaSmallVector<T, AllocatorT, N>& operator=(const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) = delete;
5195 bool empty() const { return m_Count == 0; }
5196 size_t size() const { return m_Count; }
5197 T* data() { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5198 const T* data() const { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5200 T& operator[](size_t index)
5202 VMA_HEAVY_ASSERT(index < m_Count);
5203 return data()[index];
5205 const T& operator[](size_t index) const
5207 VMA_HEAVY_ASSERT(index < m_Count);
5208 return data()[index];
5213 VMA_HEAVY_ASSERT(m_Count > 0);
5216 const T& front() const
5218 VMA_HEAVY_ASSERT(m_Count > 0);
5223 VMA_HEAVY_ASSERT(m_Count > 0);
5224 return data()[m_Count - 1];
5226 const T& back() const
5228 VMA_HEAVY_ASSERT(m_Count > 0);
5229 return data()[m_Count - 1];
5232 void resize(size_t newCount, bool freeMemory = false)
5234 if(newCount > N && m_Count > N)
5237 m_DynamicArray.resize(newCount, freeMemory);
5239 else if(newCount > N && m_Count <= N)
5242 m_DynamicArray.resize(newCount, freeMemory);
5245 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count * sizeof(T));
5248 else if(newCount <= N && m_Count > N)
5253 memcpy(m_StaticArray, m_DynamicArray.data(), newCount * sizeof(T));
5255 m_DynamicArray.resize(0, freeMemory);
5264 void clear(bool freeMemory = false)
5266 m_DynamicArray.clear(freeMemory);
5270 void insert(size_t index, const T& src)
5272 VMA_HEAVY_ASSERT(index <= m_Count);
5273 const size_t oldCount = size();
5274 resize(oldCount + 1);
5275 T* const dataPtr = data();
5276 if(index < oldCount)
5279 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) * sizeof(T));
5281 dataPtr[index] = src;
5284 void remove(size_t index)
5286 VMA_HEAVY_ASSERT(index < m_Count);
5287 const size_t oldCount = size();
5288 if(index < oldCount - 1)
5291 T* const dataPtr = data();
5292 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) * sizeof(T));
5294 resize(oldCount - 1);
5297 void push_back(const T& src)
5299 const size_t newIndex = size();
5300 resize(newIndex + 1);
5301 data()[newIndex] = src;
5306 VMA_HEAVY_ASSERT(m_Count > 0);
5310 void push_front(const T& src)
5317 VMA_HEAVY_ASSERT(m_Count > 0);
5321 typedef T* iterator;
5323 iterator begin() { return data(); }
5324 iterator end() { return data() + m_Count; }
5329 VmaVector<T, AllocatorT> m_DynamicArray;
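// Illustrative sketch (sizes assumed): VmaSmallVector is a small-buffer
// optimization; up to N elements live in the inline m_StaticArray, and the
// resize() above migrates contents to/from m_DynamicArray when the count
// crosses N.
static inline void VmaSmallVectorExample(const VkAllocationCallbacks* pCallbacks)
{
    VmaSmallVector<uint32_t, VmaStlAllocator<uint32_t>, 8> indices(
        VmaStlAllocator<uint32_t>(pCallbacks));
    for(uint32_t i = 0; i < 8; ++i)
        indices.push_back(i); // heap-free while size() <= 8
    indices.push_back(8);     // 9th element: storage moves to the dynamic array
}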
5340 template<typename T>
5341 class VmaPoolAllocator
5343 VMA_CLASS_NO_COPY(VmaPoolAllocator)
5345 VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
5346 ~VmaPoolAllocator();
5347 template<typename... Types> T* Alloc(Types... args);
5353 uint32_t NextFreeIndex;
5354 alignas(T) char Value[sizeof(T)];
5361 uint32_t FirstFreeIndex;
5364 const VkAllocationCallbacks* m_pAllocationCallbacks;
5365 const uint32_t m_FirstBlockCapacity;
5366 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
5368 ItemBlock& CreateNewBlock();
5371 template<typename T>
5372 VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
5373 m_pAllocationCallbacks(pAllocationCallbacks),
5374 m_FirstBlockCapacity(firstBlockCapacity),
5375 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
5377 VMA_ASSERT(m_FirstBlockCapacity > 1);
5380 template<typename T>
5381 VmaPoolAllocator<T>::~VmaPoolAllocator()
5383 for(size_t i = m_ItemBlocks.size(); i--; )
5384 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
5385 m_ItemBlocks.clear();
5388 template<typename T>
5389 template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
5391 for(size_t i = m_ItemBlocks.size(); i--; )
5393 ItemBlock& block = m_ItemBlocks[i];
5395 if(block.FirstFreeIndex != UINT32_MAX)
5397 Item* const pItem = &block.pItems[block.FirstFreeIndex];
5398 block.FirstFreeIndex = pItem->NextFreeIndex;
5399 T* result = (T*)&pItem->Value;
5400 new(result)T(std::forward<Types>(args)...);
5406 ItemBlock& newBlock = CreateNewBlock();
5407 Item* const pItem = &newBlock.pItems[0];
5408 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
5409 T* result = (T*)&pItem->Value;
5410 new(result)T(std::forward<Types>(args)...);
5414 template<typename T>
5415 void VmaPoolAllocator<T>::Free(T* ptr)
5418 for(size_t i = m_ItemBlocks.size(); i--; )
5420 ItemBlock& block = m_ItemBlocks[i];
5424 memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));
5427 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
5430 const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
5431 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
5432 block.FirstFreeIndex = index;
5436 VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
5439 template<typename T>
5440 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
5442 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
5443 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
5445 const ItemBlock newBlock = {
5446 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
5450 m_ItemBlocks.push_back(newBlock);
5453 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
5454 newBlock.pItems[i].NextFreeIndex = i + 1;
5455 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
5456 return m_ItemBlocks.back();
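// Illustrative sketch (capacity assumed): each block keeps an intrusive
// singly linked free list threaded through NextFreeIndex, so Alloc() pops a
// slot in O(1), Free() pushes it back after locating the owning block, and
// CreateNewBlock() grows capacity by 3/2 like a vector.
static inline void VmaPoolAllocatorExample(const VkAllocationCallbacks* pCallbacks)
{
    VmaPoolAllocator<uint64_t> pool(pCallbacks, 32); // first block holds 32 items
    uint64_t* item = pool.Alloc(7ull); // placement-new with forwarded args
    pool.Free(item);                   // slot returns to the block's free list
}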
5462 #if VMA_USE_STL_LIST
5464 #define VmaList std::list
5466 #else // #if VMA_USE_STL_LIST
5468 template<typename T>
5477 template<typename T>
5480 VMA_CLASS_NO_COPY(VmaRawList)
5482 typedef VmaListItem<T> ItemType;
5484 VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
5488 size_t GetCount() const { return m_Count; }
5489 bool IsEmpty() const { return m_Count == 0; }
5491 ItemType* Front() { return m_pFront; }
5492 const ItemType* Front() const { return m_pFront; }
5493 ItemType* Back() { return m_pBack; }
5494 const ItemType* Back() const { return m_pBack; }
5496 ItemType* PushBack();
5497 ItemType* PushFront();
5498 ItemType* PushBack(const T& value);
5499 ItemType* PushFront(const T& value);
5504 ItemType* InsertBefore(ItemType* pItem);
5506 ItemType* InsertAfter(ItemType* pItem);
5508 ItemType* InsertBefore(ItemType* pItem, const T& value);
5509 ItemType* InsertAfter(ItemType* pItem, const T& value);
5511 void Remove(ItemType* pItem);
5514 const VkAllocationCallbacks* const m_pAllocationCallbacks;
5515 VmaPoolAllocator<ItemType> m_ItemAllocator;
5521 template<typename T>
5522 VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
5523 m_pAllocationCallbacks(pAllocationCallbacks),
5524 m_ItemAllocator(pAllocationCallbacks, 128),
5531 template<typename T>
5532 VmaRawList<T>::~VmaRawList()
5538 template<typename T>
5539 void VmaRawList<T>::Clear()
5541 if(IsEmpty() == false)
5543 ItemType* pItem = m_pBack;
5544 while(pItem != VMA_NULL)
5546 ItemType* const pPrevItem = pItem->pPrev;
5547 m_ItemAllocator.Free(pItem);
5550 m_pFront = VMA_NULL;
5556 template<typename T>
5557 VmaListItem<T>* VmaRawList<T>::PushBack()
5559 ItemType* const pNewItem = m_ItemAllocator.Alloc();
5560 pNewItem->pNext = VMA_NULL;
5563 pNewItem->pPrev = VMA_NULL;
5564 m_pFront = pNewItem;
5570 pNewItem->pPrev = m_pBack;
5571 m_pBack->pNext = pNewItem;
5578 template<typename T>
5579 VmaListItem<T>* VmaRawList<T>::PushFront()
5581 ItemType* const pNewItem = m_ItemAllocator.Alloc();
5582 pNewItem->pPrev = VMA_NULL;
5585 pNewItem->pNext = VMA_NULL;
5586 m_pFront = pNewItem;
5592 pNewItem->pNext = m_pFront;
5593 m_pFront->pPrev = pNewItem;
5594 m_pFront = pNewItem;
5600 template<typename T>
5601 VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
5603 ItemType* const pNewItem = PushBack();
5604 pNewItem->Value = value;
5608 template<typename T>
5609 VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
5611 ItemType* const pNewItem = PushFront();
5612 pNewItem->Value = value;
5616 template<typename T>
5617 void VmaRawList<T>::PopBack()
5619 VMA_HEAVY_ASSERT(m_Count > 0);
5620 ItemType* const pBackItem = m_pBack;
5621 ItemType* const pPrevItem = pBackItem->pPrev;
5622 if(pPrevItem != VMA_NULL)
5624 pPrevItem->pNext = VMA_NULL;
5626 m_pBack = pPrevItem;
5627 m_ItemAllocator.Free(pBackItem);
5631 template<typename T>
5632 void VmaRawList<T>::PopFront()
5634 VMA_HEAVY_ASSERT(m_Count > 0);
5635 ItemType* const pFrontItem = m_pFront;
5636 ItemType* const pNextItem = pFrontItem->pNext;
5637 if(pNextItem != VMA_NULL)
5639 pNextItem->pPrev = VMA_NULL;
5641 m_pFront = pNextItem;
5642 m_ItemAllocator.Free(pFrontItem);
5646 template<typename T>
5647 void VmaRawList<T>::Remove(ItemType* pItem)
5649 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
5650 VMA_HEAVY_ASSERT(m_Count > 0);
5652 if(pItem->pPrev != VMA_NULL)
5654 pItem->pPrev->pNext = pItem->pNext;
5658 VMA_HEAVY_ASSERT(m_pFront == pItem);
5659 m_pFront = pItem->pNext;
5662 if(pItem->pNext != VMA_NULL)
5664 pItem->pNext->pPrev = pItem->pPrev;
5668 VMA_HEAVY_ASSERT(m_pBack == pItem);
5669 m_pBack = pItem->pPrev;
5672 m_ItemAllocator.Free(pItem);
5676 template<typename T>
5677 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
5679 if(pItem != VMA_NULL)
5681 ItemType* const prevItem = pItem->pPrev;
5682 ItemType* const newItem = m_ItemAllocator.Alloc();
5683 newItem->pPrev = prevItem;
5684 newItem->pNext = pItem;
5685 pItem->pPrev = newItem;
5686 if(prevItem != VMA_NULL)
5688 prevItem->pNext = newItem;
5692 VMA_HEAVY_ASSERT(m_pFront == pItem);
5702 template<typename T>
5703 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
5705 if(pItem != VMA_NULL)
5707 ItemType* const nextItem = pItem->pNext;
5708 ItemType* const newItem = m_ItemAllocator.Alloc();
5709 newItem->pNext = nextItem;
5710 newItem->pPrev = pItem;
5711 pItem->pNext = newItem;
5712 if(nextItem != VMA_NULL)
5714 nextItem->pPrev = newItem;
5718 VMA_HEAVY_ASSERT(m_pBack == pItem);
5728 template<typename T>
5729 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
5731 ItemType* const newItem = InsertBefore(pItem);
5732 newItem->Value = value;
5736 template<typename T>
5737 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
5739 ItemType* const newItem = InsertAfter(pItem);
5740 newItem->Value = value;
5744 template<typename T, typename AllocatorT>
5747 VMA_CLASS_NO_COPY(VmaList)
5758 T& operator*() const
5760 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5761 return m_pItem->Value;
5763 T* operator->() const
5765 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5766 return &m_pItem->Value;
5769 iterator& operator++()
5771 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5772 m_pItem = m_pItem->pNext;
5775 iterator& operator--()
5777 if(m_pItem != VMA_NULL)
5779 m_pItem = m_pItem->pPrev;
5783 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5784 m_pItem = m_pList->Back();
5789 iterator operator++(int)
5791 iterator result = *this;
5795 iterator operator--(int)
5797 iterator result = *this;
5802 bool operator==(const iterator& rhs) const
5804 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5805 return m_pItem == rhs.m_pItem;
5807 bool operator!=(const iterator& rhs) const
5809 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5810 return m_pItem != rhs.m_pItem;
5814 VmaRawList<T>* m_pList;
5815 VmaListItem<T>* m_pItem;
5817 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
5823 friend class VmaList<T, AllocatorT>;
5826 class const_iterator
5835 const_iterator(const iterator& src) :
5836 m_pList(src.m_pList),
5837 m_pItem(src.m_pItem)
5841 const T& operator*() const
5843 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5844 return m_pItem->Value;
5846 const T* operator->() const
5848 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5849 return &m_pItem->Value;
5852 const_iterator& operator++()
5854 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5855 m_pItem = m_pItem->pNext;
5858 const_iterator& operator--()
5860 if(m_pItem != VMA_NULL)
5862 m_pItem = m_pItem->pPrev;
5866 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5867 m_pItem = m_pList->Back();
5872 const_iterator operator++(int)
5874 const_iterator result = *this;
5878 const_iterator operator--(int)
5880 const_iterator result = *this;
5885 bool operator==(const const_iterator& rhs) const
5887 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5888 return m_pItem == rhs.m_pItem;
5890 bool operator!=(const const_iterator& rhs) const
5892 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5893 return m_pItem != rhs.m_pItem;
5897 const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
5903 const VmaRawList<T>* m_pList;
5904 const VmaListItem<T>* m_pItem;
5906 friend class VmaList<T, AllocatorT>;
5909 VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
5911 bool empty() const { return m_RawList.IsEmpty(); }
5912 size_t size() const { return m_RawList.GetCount(); }
5914 iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
5915 iterator end() { return iterator(&m_RawList, VMA_NULL); }
5917 const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
5918 const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }
5920 void clear() { m_RawList.Clear(); }
5921 void push_back(const T& value) { m_RawList.PushBack(value); }
5922 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
5923 iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
5926 VmaRawList<T> m_RawList;
5929 #endif // #if VMA_USE_STL_LIST
5937 #if VMA_USE_STL_UNORDERED_MAP
5939 #define VmaPair std::pair
5941 #define VMA_MAP_TYPE(KeyT, ValueT) \
5942 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
5944 #else // #if VMA_USE_STL_UNORDERED_MAP
5946 template<typename T1, typename T2>
5952 VmaPair() : first(), second() { }
5953 VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
5959 template<typename KeyT, typename ValueT>
5963 typedef VmaPair<KeyT, ValueT> PairType;
5964 typedef PairType* iterator;
5966 VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
5968 iterator begin() { return m_Vector.begin(); }
5969 iterator end() { return m_Vector.end(); }
5971 void insert(const PairType& pair);
5972 iterator find(const KeyT& key);
5973 void erase(iterator it);
5976 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
5979 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
5981 template<typename FirstT, typename SecondT>
5982 struct VmaPairFirstLess
5984 bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
5986 return lhs.first < rhs.first;
5988 bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
5990 return lhs.first < rhsFirst;
5994 template<typename KeyT, typename ValueT>
5995 void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
5997 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
5999 m_Vector.data() + m_Vector.size(),
6001 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
6002 VmaVectorInsert(m_Vector, indexToInsert, pair);
6005 template<typename KeyT, typename ValueT>
6006 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
6008 PairType* it = VmaBinaryFindFirstNotLess(
6010 m_Vector.data() + m_Vector.size(),
6012 VmaPairFirstLess<KeyT, ValueT>());
6013 if((it != m_Vector.end()) && (it->first == key))
6019 return m_Vector.end();
6023 template<typename KeyT, typename ValueT>
6024 void VmaMap<KeyT, ValueT>::erase(iterator it)
6026 VmaVectorRemove(m_Vector, it - m_Vector.begin());
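// Illustrative sketch (types assumed, fallback path only): VmaMap stores
// sorted VmaPair entries in a VmaVector, so find() is a binary search rather
// than a hash lookup, which is adequate for the small maps used internally.
static inline void VmaMapExample(const VkAllocationCallbacks* pCallbacks)
{
    VmaMap<uint32_t, uint32_t> map(VmaStlAllocator<VmaPair<uint32_t, uint32_t> >(pCallbacks));
    map.insert(VmaPair<uint32_t, uint32_t>(3, 30));
    VmaPair<uint32_t, uint32_t>* it = map.find(3); // it->second == 30
    if(it != map.end())
        map.erase(it);
}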
6029 #endif // #if VMA_USE_STL_UNORDERED_MAP
6035 class VmaDeviceMemoryBlock;
6037 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
6039 struct VmaAllocation_T
6042 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
6046 FLAG_USER_DATA_STRING = 0x01,
6050 enum ALLOCATION_TYPE
6052 ALLOCATION_TYPE_NONE,
6053 ALLOCATION_TYPE_BLOCK,
6054 ALLOCATION_TYPE_DEDICATED,
6061 VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
6064 m_pUserData{VMA_NULL},
6065 m_LastUseFrameIndex{currentFrameIndex},
6066 m_MemoryTypeIndex{0},
6067 m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
6068 m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
6070 m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
6072 #if VMA_STATS_STRING_ENABLED
6073 m_CreationFrameIndex = currentFrameIndex;
6074 m_BufferImageUsage = 0;
6080 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");
6083 VMA_ASSERT(m_pUserData == VMA_NULL);
6086 void InitBlockAllocation(
6087 VmaDeviceMemoryBlock* block,
6088 VkDeviceSize offset,
6089 VkDeviceSize alignment,
6091 uint32_t memoryTypeIndex,
6092 VmaSuballocationType suballocationType,
6096 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6097 VMA_ASSERT(block != VMA_NULL);
6098 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6099 m_Alignment = alignment;
6101 m_MemoryTypeIndex = memoryTypeIndex;
6102 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6103 m_SuballocationType = (uint8_t)suballocationType;
6104 m_BlockAllocation.m_Block = block;
6105 m_BlockAllocation.m_Offset = offset;
6106 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
6111 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6112 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
6113 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6114 m_MemoryTypeIndex = 0;
6115 m_BlockAllocation.m_Block = VMA_NULL;
6116 m_BlockAllocation.m_Offset = 0;
6117 m_BlockAllocation.m_CanBecomeLost = true;
6120 void ChangeBlockAllocation(
6122 VmaDeviceMemoryBlock* block,
6123 VkDeviceSize offset);
6125 void ChangeOffset(VkDeviceSize newOffset);
6128 void InitDedicatedAllocation(
6129 uint32_t memoryTypeIndex,
6130 VkDeviceMemory hMemory,
6131 VmaSuballocationType suballocationType,
6135 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6136 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
6137 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
6140 m_MemoryTypeIndex = memoryTypeIndex;
6141 m_SuballocationType = (uint8_t)suballocationType;
6142 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6143 m_DedicatedAllocation.m_hMemory = hMemory;
6144 m_DedicatedAllocation.m_pMappedData = pMappedData;
6147 ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
6148 VkDeviceSize GetAlignment() const { return m_Alignment; }
6149 VkDeviceSize GetSize() const { return m_Size; }
6150 bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
6151 void* GetUserData() const { return m_pUserData; }
6152 void SetUserData(VmaAllocator hAllocator, void* pUserData);
6153 VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }
6155 VmaDeviceMemoryBlock* GetBlock() const
6157 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6158 return m_BlockAllocation.m_Block;
6160 VkDeviceSize GetOffset() const;
6161 VkDeviceMemory GetMemory() const;
6162 uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
6163 bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
6164 void* GetMappedData() const;
6165 bool CanBecomeLost() const;
6167 uint32_t GetLastUseFrameIndex() const
6169 return m_LastUseFrameIndex.load();
6171 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
6173 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
6183 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6185 void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
6187 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
6198 void BlockAllocMap();
6199 void BlockAllocUnmap();
6200 VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
6203 #if VMA_STATS_STRING_ENABLED
6204 uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
6205 uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }
6207 void InitBufferImageUsage(uint32_t bufferImageUsage)
6209 VMA_ASSERT(m_BufferImageUsage == 0);
6210 m_BufferImageUsage = bufferImageUsage;
6213 void PrintParameters(class VmaJsonWriter& json) const;
6217 VkDeviceSize m_Alignment;
6218 VkDeviceSize m_Size;
6220 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
6221 uint32_t m_MemoryTypeIndex;
6223 uint8_t m_SuballocationType;
6230 struct BlockAllocation
6232 VmaDeviceMemoryBlock* m_Block;
6233 VkDeviceSize m_Offset;
6234 bool m_CanBecomeLost;
6238 struct DedicatedAllocation
6240 VkDeviceMemory m_hMemory;
6241 void* m_pMappedData;
6247 BlockAllocation m_BlockAllocation;
6249 DedicatedAllocation m_DedicatedAllocation;
6252 #if VMA_STATS_STRING_ENABLED
6253 uint32_t m_CreationFrameIndex;
6254 uint32_t m_BufferImageUsage;
6264 struct VmaSuballocation
6266 VkDeviceSize offset;
6269 VmaSuballocationType type;
6273 struct VmaSuballocationOffsetLess
6275 bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
6277 return lhs.offset < rhs.offset;
6280 struct VmaSuballocationOffsetGreater
6282 bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
6284 return lhs.offset > rhs.offset;
6288 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
6291 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
6293 enum class VmaAllocationRequestType
6315 struct VmaAllocationRequest
6317 VkDeviceSize offset;
6318 VkDeviceSize sumFreeSize;
6319 VkDeviceSize sumItemSize;
6320 VmaSuballocationList::iterator item;
6321 size_t itemsToMakeLostCount;
6323 VmaAllocationRequestType type;
6325 VkDeviceSize CalcCost() const
6327 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
6335 class VmaBlockMetadata
6339 virtual ~VmaBlockMetadata() { }
6340 virtual void Init(VkDeviceSize size) { m_Size = size; }
6343 virtual bool Validate() const = 0;
6344 VkDeviceSize GetSize() const { return m_Size; }
6345 virtual size_t GetAllocationCount() const = 0;
6346 virtual VkDeviceSize GetSumFreeSize() const = 0;
6347 virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
6349 virtual bool IsEmpty() const = 0;
6351 virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
6353 virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;
6355 #if VMA_STATS_STRING_ENABLED
6356 virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
6362 virtual bool CreateAllocationRequest(
6363 uint32_t currentFrameIndex,
6364 uint32_t frameInUseCount,
6365 VkDeviceSize bufferImageGranularity,
6366 VkDeviceSize allocSize,
6367 VkDeviceSize allocAlignment,
6369 VmaSuballocationType allocType,
6370 bool canMakeOtherLost,
6373 VmaAllocationRequest* pAllocationRequest) = 0;
6375 virtual bool MakeRequestedAllocationsLost(
6376 uint32_t currentFrameIndex,
6377 uint32_t frameInUseCount,
6378 VmaAllocationRequest* pAllocationRequest) = 0;
6380 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
6382 virtual VkResult CheckCorruption(const void* pBlockData) = 0;
6386 const VmaAllocationRequest& request,
6387 VmaSuballocationType type,
6388 VkDeviceSize allocSize,
6393 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
6396 const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }
6398 #if VMA_STATS_STRING_ENABLED
6399 void PrintDetailedMap_Begin(class VmaJsonWriter& json,
6400 VkDeviceSize unusedBytes,
6401 size_t allocationCount,
6402 size_t unusedRangeCount) const;
6403 void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
6404 VkDeviceSize offset,
6406 void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
6407 VkDeviceSize offset,
6408 VkDeviceSize size) const;
6409 void PrintDetailedMap_End(class VmaJsonWriter& json) const;
6413 VkDeviceSize m_Size;
6414 const VkAllocationCallbacks* m_pAllocationCallbacks;
6417 #define VMA_VALIDATE(cond) do { if(!(cond)) { \
6418 VMA_ASSERT(0 && "Validation failed: " #cond); \
6422 class VmaBlockMetadata_Generic : public VmaBlockMetadata
6424 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
6427 virtual ~VmaBlockMetadata_Generic();
6428 virtual void Init(VkDeviceSize size);
6430 virtual bool Validate() const;
6431 virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
6432 virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
6433 virtual VkDeviceSize GetUnusedRangeSizeMax() const;
6434 virtual bool IsEmpty() const;
6436 virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
6437 virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
6439 #if VMA_STATS_STRING_ENABLED
6440 virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
6443 virtual bool CreateAllocationRequest(
6444 uint32_t currentFrameIndex,
6445 uint32_t frameInUseCount,
6446 VkDeviceSize bufferImageGranularity,
6447 VkDeviceSize allocSize,
6448 VkDeviceSize allocAlignment,
6450 VmaSuballocationType allocType,
6451 bool canMakeOtherLost,
6453 VmaAllocationRequest* pAllocationRequest);
6455 virtual bool MakeRequestedAllocationsLost(
6456 uint32_t currentFrameIndex,
6457 uint32_t frameInUseCount,
6458 VmaAllocationRequest* pAllocationRequest);
6460 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6462 virtual VkResult CheckCorruption(
const void* pBlockData);
6465 const VmaAllocationRequest& request,
6466 VmaSuballocationType type,
6467 VkDeviceSize allocSize,
6471 virtual void FreeAtOffset(VkDeviceSize offset);
6476 bool IsBufferImageGranularityConflictPossible(
6477 VkDeviceSize bufferImageGranularity,
6478 VmaSuballocationType& inOutPrevSuballocType)
const;
6481 friend class VmaDefragmentationAlgorithm_Generic;
6482 friend class VmaDefragmentationAlgorithm_Fast;
6484 uint32_t m_FreeCount;
6485 VkDeviceSize m_SumFreeSize;
6486 VmaSuballocationList m_Suballocations;
6489 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
6491 bool ValidateFreeSuballocationList()
const;
6495 bool CheckAllocation(
6496 uint32_t currentFrameIndex,
6497 uint32_t frameInUseCount,
6498 VkDeviceSize bufferImageGranularity,
6499 VkDeviceSize allocSize,
6500 VkDeviceSize allocAlignment,
6501 VmaSuballocationType allocType,
6502 VmaSuballocationList::const_iterator suballocItem,
6503 bool canMakeOtherLost,
6504 VkDeviceSize* pOffset,
6505 size_t* itemsToMakeLostCount,
6506 VkDeviceSize* pSumFreeSize,
6507 VkDeviceSize* pSumItemSize)
const;
6509 void MergeFreeWithNext(VmaSuballocationList::iterator item);
6513 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
6516 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
6519 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
/*
Metadata of a block that uses the linear algorithm: suballocations are kept in
one or two vectors, which can work as a simple stack, a double stack growing
from both ends, or a ring buffer, depending on m_2ndVectorMode.
*/
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        /*
        Suballocations in 2nd vector are created later than the ones in 1st, but they
        all have smaller offset.
        */
        SECOND_VECTOR_RING_BUFFER,
        /*
        Suballocations in 2nd vector are upper side of double stack.
        They all have offsets higher than those in 1st vector.
        Top of this stack means smaller offsets, but higher indices in this vector.
        */
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
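/*
Usage sketch (illustrative): the linear algorithm is selected per custom pool
with VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT. Freeing strictly in allocation
order (or in reverse) keeps the metadata in stack/ring-buffer mode with O(1)
cost per operation. `allocator` and `memTypeIndex` are hypothetical.

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    poolCreateInfo.blockSize = 64ull * 1024 * 1024;
    poolCreateInfo.maxBlockCount = 1; // linear pools are single-block

    VmaPool pool;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
*/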
/*
- GetSize() is the original size of allocated memory block.
- m_UsableSize is this size aligned down to a power of two.
  All allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them.
  It is reported as separate, unused range, not available for allocations.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;
        // ... (per-type payload elided)
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    // Doubly-linked list of free nodes at each level.
    struct
    {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // Alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level. node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level. node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
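/*
Worked example (illustrative): with LevelToNodeSize(level) = m_UsableSize >> level,
a block with m_UsableSize = 256 MB has node sizes 256, 128, 64, 32, 16, 8 MB at
levels 0 through 5. A 5 MB request is served from the deepest level whose node
size still fits it, i.e. an 8 MB node at level 5, wasting 3 MB to internal
fragmentation - the price paid for O(log N) alloc/free and trivial merging of
buddies on Free().
*/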
/*
Represents a single block of device memory (`VkDeviceMemory`) with all the
data about its regions (aka suballocations, #VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    VkResult CheckCorruption(VmaAllocator hAllocator);

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it's not used by multiple threads
    simultaneously, e.g. vkMapMemory, vkBindBufferMemory.
    Also protects m_MapCount, m_pMappedData.
    Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
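/*
Note on the map reference count (illustrative sketch, not library code): Map()
and Unmap() take a `count` so that one vkMapMemory of the whole block can back
many nested mappings. `pBlock` and `hAllocator` are hypothetical.

    void* pData = VMA_NULL;
    VkResult res = pBlock->Map(hAllocator, 1, &pData); // maps or bumps refcount
    if(res == VK_SUCCESS)
    {
        // ... use pData ...
        pBlock->Unmap(hAllocator, 1); // vkUnmapMemory happens only at refcount 0
    }
*/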
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaDeviceMemoryBlock* pSrcBlock;
    VmaDeviceMemoryBlock* pDstBlock;
};

class VmaDefragmentationAlgorithm;
/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
Vulkan memory type.

Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    void GetPoolStats(VmaPoolStats* pStats);

    bool IsEmpty();
    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void Free(const VmaAllocation hAllocation);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    // Saves results in pCtx->res.
    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        uint32_t flags,
        VmaDefragmentationStats* pStats);

    uint32_t ProcessDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves);

    void CommitDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationStats* pStats);

    ////////////////////////////////////////////////////////////////////////////////
    // To be used only while the m_Mutex is locked. Used during defragmentation.

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    /* There can be at most one allocation that is completely empty (except when minBlockCount > 0) -
    a hysteresis to avoid pessimistic case of alternating creation and destruction of a VkDeviceMemory. */
    bool m_HasEmptyBlock;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    void UpdateHasEmptyBlock();
};
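/*
Illustrative sketch (not the library's code) of the allocation flow implemented
by Allocate()/AllocatePage(): try existing blocks first, then grow the vector.
`TryAllocateFromBlock`, `maxBlockCount` and `preferredBlockSize` are hypothetical.

    for(size_t i = 0; i < blockVector.GetBlockCount(); ++i)
    {
        if(TryAllocateFromBlock(blockVector.GetBlock(i)) == VK_SUCCESS)
            return VK_SUCCESS;
    }
    if(blockVector.GetBlockCount() < maxBlockCount)
    {
        size_t newBlockIndex = 0;
        if(CreateBlock(preferredBlockSize, &newBlockIndex) == VK_SUCCESS)
            return TryAllocateFromBlock(blockVector.GetBlock(newBlockIndex));
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
*/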
struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo,
        VkDeviceSize preferredBlockSize);
    ~VmaPool_T();

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
    // ...
#endif

private:
    uint32_t m_Id;
    char* m_Name;
};
/*
Performs defragmentation:

- Updates `pBlockVector->m_pMetadata`.
- Updates allocations by calling ChangeBlockAllocation() or ChangeOffset().
- Does not move actual data, only returns requested moves as `moves`.
*/
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        bool freeOldAllocations);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    // Leave this structure for remaining empty space.
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    // This structure becomes invalid.
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }

                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
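/*
Worked example for FreeSpaceDatabase::Fetch() (illustrative): suppose the
tracked gaps have sizes {64, 256, 1024, invalid} and we fetch size = 200,
alignment = 16. Only the 256- and 1024-byte gaps fit; Fetch() picks the one
that leaves the LARGEST remainder (here 1024, leaving 824). If the remainder
is still at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER, the entry is
shrunk in place; otherwise the slot is invalidated (blockInfoIndex = SIZE_MAX).
*/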
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    uint32_t flags;
    VkBuffer hBuffer;
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VkResult res;
    bool mutexLocked;
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
    uint32_t defragmentationMovesProcessed;
    uint32_t defragmentationMovesCommitted;
    bool hasDefragmentationPlan;

    VmaBlockVectorDefragmentationContext(
        VmaAllocator hAllocator,
        VmaPool hCustomPool, // Optional.
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

    void Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags);

private:
    const VmaAllocator m_hAllocator;
    // Null if not from custom pool.
    const VmaPool m_hCustomPool;
    // Redundant, for convenience not to fetch from m_hCustomPool->m_BlockVector or m_hAllocator->m_pBlockVectors.
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    // Used between constructor and Begin.
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        VmaAllocator hAllocator,
        uint32_t currFrameIndex,
        uint32_t flags,
        VmaDefragmentationStats* pStats);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, const VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        const VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    /*
    Returns:
    - `VK_SUCCESS` if succeeded and object can be destroyed immediately.
    - `VK_NOT_READY` if succeeded but the object must remain alive until vmaDefragmentationEnd().
    - Negative value if error occurred and object can be destroyed immediately.
    */
    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags);

    VkResult DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo);
    VkResult DefragmentPassEnd();

private:
    const VmaAllocator m_hAllocator;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    VmaDefragmentationStats* const m_pStats;

    VkDeviceSize m_MaxCpuBytesToMove;
    uint32_t m_MaxCpuAllocationsToMove;
    VkDeviceSize m_MaxGpuBytesToMove;
    uint32_t m_MaxGpuAllocationsToMove;

    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    VmaRecorder();
    VkResult Init(const VmaRecordSettings& settings, bool useMutex);
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled,
        bool deviceCoherentMemoryExtensionEnabled);
    ~VmaRecorder();

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationContext ctx);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        VmaDefragmentationContext ctx);
    void RecordSetPoolName(uint32_t frameIndex,
        VmaPool pool,
        const char* name);

private:
    struct CallParams
    {
        uint32_t threadId;
        double time;
    };

    class UserDataString
    {
    public:
        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
        const char* GetString() const { return m_Str; }

    private:
        char m_PtrStr[17];
        const char* m_Str;
    };

    bool m_UseMutex;
    VmaRecordFlags m_Flags;
    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
    void Flush();
};

#endif // #if VMA_RECORDING_ENABLED
/*
Thread-safe wrapper over VmaPoolAllocator free list, for allocation of VmaAllocation_T objects.
*/
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    template<typename... Types> VmaAllocation Allocate(Types... args);
    void Free(VmaAllocation hAlloc);

private:
    VMA_MUTEX m_Mutex;
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
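/*
Usage sketch (illustrative): the per-heap accounting accumulated in
VmaCurrentBudgetData is what the public vmaGetBudget() reports. `allocator` is
a hypothetical, already created VmaAllocator.

    VmaBudget budgets[VK_MAX_MEMORY_HEAPS] = {};
    vmaGetBudget(allocator, budgets);
    // budgets[heapIndex].usage vs budgets[heapIndex].budget can be compared
    // before committing to a large allocation on that heap.
*/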
// Main allocator object.
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    bool m_UseMutex;
    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0).
    bool m_UseKhrBindMemory2; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0).
    bool m_UseExtMemoryBudget;
    bool m_UseAmdDeviceCoherentMemory;
    bool m_UseKhrBufferDeviceAddress;
    VkDevice m_hDevice;
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap, so cannot allocate more than the heap size.
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo);
    ~VmaAllocator_T();

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

    uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage, // UINT32_MAX when unknown.
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationStats* pStats,
        VmaDefragmentationContext* pContext);
    VkResult DefragmentationEnd(VmaDefragmentationContext context);

    VkResult DefragmentationPassBegin(
        VmaDefragmentationPassInfo* pInfo,
        VmaDefragmentationContext context);
    VkResult DefragmentationPassEnd(VmaDefragmentationContext context);

    VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR.
    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkBuffer buffer,
        const void* pNext);
    // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR.
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkImage image,
        const void* pNext);

    VkResult BindBufferMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

    VkResult FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);
    VkResult FlushOrInvalidateAllocations(
        uint32_t allocationCount,
        const VmaAllocation* allocations,
        const VkDeviceSize* offsets, const VkDeviceSize* sizes,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    /*
    Returns bit mask of memory types that can support defragmentation on GPU as
    they support creation of required buffer for copy operations.
    */
    uint32_t GetGpuDefragmentationMemoryTypeBits();

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

    // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types.
    uint32_t m_GlobalMemoryTypeBits;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    void ImportVulkanFunctions_Static();
#endif

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    void ImportVulkanFunctions_Dynamic();
#endif

    void ValidateVulkanFunctions();

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool withinBudget,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void FreeDedicatedMemory(const VmaAllocation allocation);

    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

    uint32_t CalculateGlobalMemoryTypeBits() const;

    bool GetFlushOrInvalidateRange(
        VmaAllocation allocation,
        VkDeviceSize offset, VkDeviceSize size,
        VkMappedMemoryRange& outRange) const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
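/*
Worked example for GetMemoryTypeMinAlignment() (illustrative): on a device with
nonCoherentAtomSize = 64, an allocation in a HOST_VISIBLE but non-HOST_COHERENT
memory type is aligned to max(VMA_DEBUG_ALIGNMENT, 64) = 64 bytes, so that
vkFlushMappedMemoryRanges()/vkInvalidateMappedMemoryRanges() can always use
ranges rounded to the atom size without touching a neighboring allocation.
*/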
////////////////////////////////////////////////////////////////////////////////
// Memory allocation #2 after VmaAllocator_T definition

static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char *p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char *p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
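/*
Note on buffer sizing in AddNumber() (illustrative): digits are produced
least-significant first into the end of a stack buffer, so the buffer only
needs to hold the worst case plus a terminator - 10 digits for uint32_t
(4294967295) and 20 for uint64_t (18446744073709551615), hence char buf[11]
and char buf[21] respectively.
*/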
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            // Inside an object, even-indexed values must be the (string) keys.
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
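/*
Usage sketch (illustrative): emitting {"Name": "Block", "Size": 1024} with the
writer. `allocator` providing the callbacks is hypothetical.

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();
        json.WriteString("Name");
        json.WriteString("Block");
        json.WriteString("Size");
        json.WriteNumber(1024u);
        json.EndObject();
    }
    // sb.GetData(), sb.GetLength() now hold the JSON text (not null-terminated).
*/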
////////////////////////////////////////////////////////////////////////////////
// class VmaAllocation_T

void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
        {
            ++mapRefCount;
        }
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
        break;
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you're doing :)
    */
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
                return true;
            }
        }
    }
}

#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}

void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED

struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata

VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Generic

VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}

bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations as calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations as calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in
    // m_FreeSuballocationsBySize, calculated from traversing their list.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Margin required between allocations - every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // Margin required between allocations - previous allocation must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize
    // doesn't match expected one.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}
VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}

void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;
    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request: Early return.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    // New algorithm, efficiently searching freeSuballocationsBySize.
    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount,
                    bufferImageGranularity, allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset, &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize, &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex, frameInUseCount,
                    bufferImageGranularity, allocSize, allocAlignment, allocType,
                    it,
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset, &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize, &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search starting from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount,
                    bufferImageGranularity, allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset, &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize, &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.

        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount,
                    bufferImageGranularity, allocSize, allocAlignment, allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset, &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize, &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
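/*
Worked example for the best-fit path (illustrative): m_FreeSuballocationsBySize
is sorted ascending, e.g. sizes {64, 128, 512}. For allocSize = 100 with
VMA_DEBUG_MARGIN = 0, VmaBinaryFindFirstNotLess() lands on the 128-byte entry,
and the loop only walks the tail {128, 512} - every smaller gap is skipped
without a CheckAllocation() call. Alignment or granularity may still disqualify
128, in which case 512 is tried next.
*/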
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        { return false; }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}

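// Walks the entire suballocation list and makes lost every allocation that is
// eligible: CanBecomeLost() and last use at least frameInUseCount frames ago.
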
uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}

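// Checks the margin before and after every used suballocation for the magic
// value that is written there when VMA_DEBUG_MARGIN-based corruption detection
// is enabled.
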
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}

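// Commits a previously created allocation request: converts the chosen free
// suballocation to a used one and registers any leftover space at its
// beginning/end as new free suballocations.
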
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert a new free suballocation after current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert a new free suballocation before current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    { ++m_FreeCount; }
    if(paddingEnd > 0)
    { ++m_FreeCount; }
    m_SumFreeSize -= allocSize;
}

void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}

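// Core fitting test of the generic algorithm: starting from suballocItem,
// computes the final offset (debug margin, alignment, bufferImageGranularity)
// and checks whether allocSize fits, optionally counting the allocations that
// would have to be made lost when canMakeOtherLost is true.
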
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            { return false; }
        }

        // Remaining size is too small for this request: Early return.
        if(GetSize() - suballocItem->offset < allocSize)
        { return false; }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        { *pOffset += VMA_DEBUG_MARGIN; }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                { break; } // Already on previous page.
            }
            if(bufferImageGranularityConflict)
            { *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity); }
        }

        // Now that we have final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        { return false; }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > GetSize())
        { return false; }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                { return false; }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    { return false; }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        { return false; }
                    }
                }
                else
                { break; } // Already on next page.
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        { return false; }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        { *pOffset += VMA_DEBUG_MARGIN; }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                { break; } // Already on previous page.
            }
            if(bufferImageGranularityConflict)
            { *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity); }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        { return false; }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    { return false; }
                }
                else
                { break; } // Already on next page.
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. *pOffset is already filled.
    return true;
}

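// Merges `item` with the free suballocation that immediately follows it.
// Both must be free. The following item is erased from the list.
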
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}

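// Releases given suballocation: marks it as free, updates the totals, and
// coalesces it with free neighbors. Returns an iterator to the resulting,
// possibly merged, free item.
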
VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}

void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    // You may want to enable this validation at the beginning or at the end of
    // this function, depending on what do you want to check.
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }
}

void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }
}

bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty())
    { return false; }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it)
    {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
        {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
            {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}

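////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Linear
//
// The linear metadata keeps two suballocation vectors. The 1st vector grows
// towards the end of the block. The 2nd vector is used either as the upper part
// of a double stack or as the wrapped-around part of a ring buffer.
// m_1stVectorIndex selects which of m_Suballocations0/1 currently plays the
// role of the 1st vector; CleanupAfterFree() can swap them.
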
VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_SumFreeSize(0),
    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_1stVectorIndex(0),
    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
    m_1stNullItemsBeginCount(0),
    m_1stNullItemsMiddleCount(0),
    m_2ndNullItemsCount(0)
{
}

VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
{
}

void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}

bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty())
    {
        // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty())
    {
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i)
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            { ++nullItem2ndCount; }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree)
        {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else
        { ++nullItem1stCount; }

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; )
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            { ++nullItem2ndCount; }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}

size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}

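// Returns the largest contiguous range still available for a new allocation.
// Gaps left by freed allocations inside the vectors are not counted, because
// the linear algorithm never reuses them.
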
VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();

    if(IsEmpty())
    { return size; }

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode)
    {
    case SECOND_VECTOR_EMPTY:
        // Available space is after end of 1st, as well as before beginning of 1st (which
        // would make it a ring buffer).
        {
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }

    case SECOND_VECTOR_RING_BUFFER:
        // Available space is only between end of 2nd and beginning of 1st.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }

    case SECOND_VECTOR_DOUBLE_STACK:
        // Available space is only between end of 1st and top of 2nd.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }

    default:
        VMA_ASSERT(0);
        return 0;
    }
}

void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    outInfo.blockCount = 1;
    outInfo.allocationCount = (uint32_t)GetAllocationCount();
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            { ++nextAlloc2ndIndex; }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    // There is free space from lastOffset to suballoc.offset.
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                // There is free space from lastOffset to freeSpace2ndTo1stEnd.
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // End of loop.
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        { ++nextAlloc1stIndex; }

        // Found non-null allocation.
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            // 1. Process free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }

            // 2. Process this allocation.
            outInfo.usedBytes += suballoc.size;
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

            // 3. Prepare for next iteration.
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        // We are at the end.
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }

            // End of loop.
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            { --nextAlloc2ndIndex; }

            // Found non-null allocation.
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // End of loop.
                lastOffset = size;
            }
        }
    }

    outInfo.unusedBytes = size - outInfo.usedBytes;
}

void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            { ++nextAlloc2ndIndex; }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    // There is free space from lastOffset to suballoc.offset.
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                ++inoutStats.allocationCount;

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                // There is free space from lastOffset to freeSpace2ndTo1stEnd.
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // End of loop.
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        { ++nextAlloc1stIndex; }

        // Found non-null allocation.
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            // 1. Process free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            // 2. Process this allocation.
            ++inoutStats.allocationCount;

            // 3. Prepare for next iteration.
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        // We are at the end.
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            // End of loop.
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            { --nextAlloc2ndIndex; }

            // Found non-null allocation.
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                ++inoutStats.allocationCount;

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // End of loop.
                lastOffset = size;
            }
        }
    }
}

#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: Calculate overall statistics.

    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;

    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            { ++nextAlloc2ndIndex; }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                { ++unusedRangeCount; }

                // 2. Process this allocation.
                ++alloc2ndCount;
                usedBytes += suballoc.size;

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                { ++unusedRangeCount; }

                // End of loop.
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        { ++nextAlloc1stIndex; }

        // Found non-null allocation.
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            // 1. Process free space before this allocation.
            if(lastOffset < suballoc.offset)
            { ++unusedRangeCount; }

            // 2. Process this allocation.
            ++alloc1stCount;
            usedBytes += suballoc.size;

            // 3. Prepare for next iteration.
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        // We are at the end.
        else
        {
            if(lastOffset < size)
            { ++unusedRangeCount; }

            // End of loop.
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            { --nextAlloc2ndIndex; }

            // Found non-null allocation.
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                { ++unusedRangeCount; }

                // 2. Process this allocation.
                ++alloc2ndCount;
                usedBytes += suballoc.size;

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < size)
                { ++unusedRangeCount; }

                // End of loop.
                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);

    // SECOND PASS: Write the actual JSON entries.
    lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            { ++nextAlloc2ndIndex; }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                // 2. Process this allocation.
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                // End of loop.
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        { ++nextAlloc1stIndex; }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            // 1. Process free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }

            // 2. Process this allocation.
            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

            // 3. Prepare for next iteration.
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }

            // End of loop.
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            { --nextAlloc2ndIndex; }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                // 2. Process this allocation.
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                // End of loop.
                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED

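// Dispatches to the upper-address path (top of the double stack) or to the
// lower-address path (end of 1st vector, or wrap-around into the ring buffer).
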
bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());
    return upperAddress ?
        CreateAllocationRequest_UpperAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
        CreateAllocationRequest_LowerAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}

bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
        return false;
    }

    // Try to allocate before 2nd.back(), or end of block if 2nd.empty().
    if(allocSize > size)
    { return false; }
    VkDeviceSize resultBaseOffset = size - allocSize;
    if(!suballocations2nd.empty())
    {
        const VmaSuballocation& lastSuballoc = suballocations2nd.back();
        resultBaseOffset = lastSuballoc.offset - allocSize;
        if(allocSize > lastSuballoc.offset)
        { return false; }
    }

    // Start from offset equal to end of free space.
    VkDeviceSize resultOffset = resultBaseOffset;

    // Apply VMA_DEBUG_MARGIN at the end.
    if(VMA_DEBUG_MARGIN > 0)
    {
        if(resultOffset < VMA_DEBUG_MARGIN)
        { return false; }
        resultOffset -= VMA_DEBUG_MARGIN;
    }

    // Apply alignment.
    resultOffset = VmaAlignDown(resultOffset, allocAlignment);

    // Check next suballocations from 2nd for BufferImageGranularity conflicts.
    // Make bigger alignment if necessary.
    if(bufferImageGranularity > 1 && !suballocations2nd.empty())
    {
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
        {
            const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
            if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
            {
                if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
                {
                    bufferImageGranularityConflict = true;
                    break;
                }
            }
            else
            { break; } // Already on previous page.
        }
        if(bufferImageGranularityConflict)
        { resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity); }
    }

    // There is enough free space.
    const VkDeviceSize endOf1st = !suballocations1st.empty() ?
        suballocations1st.back().offset + suballocations1st.back().size :
        0;
    if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
    {
        // Check previous suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
                    { return false; }
                }
                else
                { break; } // Already on next page.
            }
        }

        // All tests passed: Success.
        pAllocationRequest->offset = resultOffset;
        pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
        pAllocationRequest->sumItemSize = 0;
        // pAllocationRequest->item unused.
        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
        return true;
    }

    return false;
}

bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        // Try to allocate at the end of 1st vector.

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        { resultOffset += VMA_DEBUG_MARGIN; }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations1st.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                { break; } // Already on previous page.
            }
            if(bufferImageGranularityConflict)
            { resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity); }
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
            {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
                {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        { return false; }
                    }
                    else
                    { break; } // Already on previous page.
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            // pAllocationRequest->item, customData unused.
            pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }

    // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
    // beginning of 1st vector.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        { resultOffset += VMA_DEBUG_MARGIN; }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                { break; } // Already on previous page.
            }
            if(bufferImageGranularityConflict)
            { resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity); }
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost)
        {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
            {
                // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    // No problem.
                }
                else
                {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else
                    { return false; }
                }
                ++index1st;
            }

            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, we must mark more allocations lost or fail.
            if(bufferImageGranularity > 1)
            {
                while(index1st < suballocations1st.size())
                {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
                    {
                        if(suballoc.hAllocation != VK_NULL_HANDLE)
                        {
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                            {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else
                            { return false; }
                        }
                    }
                    else
                    { break; } // Already on next page.
                    ++index1st;
                }
            }

            // Special case: There is not enough room at the end for this allocation,
            // even after making all from the 1st lost.
            if(index1st == suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
            {
                VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
                return false;
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1)
            {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++)
                {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        { return false; }
                    }
                    else
                    { break; } // Already on next page.
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
            // pAllocationRequest->item, customData unused.
            return true;
        }
    }

    return false;
}

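// Ring-buffer case: walks from the first used item of the 1st vector, wrapping
// around into the 2nd vector if necessary, and makes allocations lost until the
// requested number of items has been freed.
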
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0)
    { return true; }

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    // We always start from 1st.
    SuballocationVectorType* suballocations = &AccessSuballocations1st();
    size_t index = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
    {
        if(index == suballocations->size())
        {
            index = 0;
            // If we get to the end of 1st, we wrap around to beginning of 2nd.
            if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                suballocations = &AccessSuballocations2nd();
            }
            // else: m_2ndVectorMode == SECOND_VECTOR_EMPTY:
            // suballocations continues pointing at AccessSuballocations1st().
            VMA_ASSERT(!suballocations->empty());
        }
        VmaSuballocation& suballoc = (*suballocations)[index];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                if(suballocations == &AccessSuballocations1st())
                { ++m_1stNullItemsMiddleCount; }
                else
                { ++m_2ndNullItemsCount; }
                ++madeLostCount;
            }
            else
            { return false; }
        }
        ++index;
    }

    CleanupAfterFree();

    return true;
}

uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount)
    {
        CleanupAfterFree();
    }

    return lostAllocationCount;
}

VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}

void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type)
    {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();

            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());

            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of 2nd vector must fit before the beginning of used space in 1st vector.
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode)
            {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}

void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}

void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty())
    {
        // First allocation: Mark it as next empty at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset)
        {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2nd vector.
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
    {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaBinaryFindSorted(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc,
            VmaSuballocationOffsetLess());
        if(it != suballocations1st.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
    {
        // Item from the middle of 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
        if(it != suballocations2nd.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}

bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}

void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty())
    {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else
    {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
        {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            VmaVectorRemove(suballocations2nd, 0);
        }

        if(ShouldCompact1st())
        {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
            {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
                { ++srcIndex; }
                if(dstIndex != srcIndex)
                {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty())
        {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
        {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
                {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}

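////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Buddy
//
// Buddy allocator: the usable part of the block (its size rounded down to a
// power of 2) is managed as a binary tree. Each node is free, an allocation, or
// split into two buddies; free nodes are linked into one list per level.
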
VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}

VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}

void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
    {
        ++m_LevelCount;
    }

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}

bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next)
        {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL)
            {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else
            {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists at higher levels are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}

VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        if(m_FreeList[level].front != VMA_NULL)
        {
            return LevelToNodeSize(level);
        }
    }
    return 0;
}

void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    outInfo.blockCount = 1;
    outInfo.allocationCount = outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = outInfo.unusedBytes = 0;
    outInfo.allocationSizeMax = outInfo.unusedRangeSizeMax = 0;
    outInfo.allocationSizeMin = outInfo.unusedRangeSizeMin = UINT64_MAX;

    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0)
    {
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
    }
}

void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0)
    {
        ++inoutStats.unusedRangeCount;
        // Not updating unusedRangeSizeMax with unusableSize because this space is not available for allocations.
    }
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED

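// Searches the free lists from the best-fitting level upwards for a node whose
// offset satisfies the requested alignment. The chosen level is passed on to
// Alloc() through pAllocationRequest->customData.
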
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity: whenever the allocation
    // could be an OPTIMAL image, round both alignment and size up to the granularity.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
    {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    /*
    Lost allocations are not supported in buddy allocator. Support might be added in the future.
    */
    return pAllocationRequest->itemsToMakeLostCount == 0;
}

uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    /*
    Lost allocations are not supported in buddy allocator. Support might be added in the future.
    */
    return 0;
}
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down, splitting free nodes.
    while(currLevel < targetLevel)
    {
        // currNode is already the first free node at currLevel.
        // Remove it from the list of free nodes at this level.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }

    vma_delete(GetAllocationCallbacks(), node);
}
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev and next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
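// EXAMPLE (editor's addition, illustrative only): the same level computation as
// a standalone sketch over plain integers. For usableSize = 256 MiB, a 100 MiB
// request maps to level 1 (128 MiB nodes): it fits in 128 MiB but not in 64 MiB.
#if 0
static uint32_t ExampleAllocSizeToLevel(uint64_t usableSize, uint32_t levelCount, uint64_t allocSize)
{
    uint32_t level = 0;
    uint64_t nextLevelNodeSize = usableSize >> 1;
    // Descend while the request still fits in a half-sized node.
    while(allocSize <= nextLevelNodeSize && level + 1 < levelCount)
    {
        ++level;
        nextLevelNodeSize >>= 1;
    }
    return level;
}
#endif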
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find node and level by walking down from the root.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT)
    {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize)
        {
            node = node->split.leftChild;
        }
        else
        {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free nodes with their buddies, going up.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
    {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += levelNodeSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            ++outInfo.allocationCount;
            outInfo.usedBytes += allocSize;
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);

            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    // List is empty.
    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else
    {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}
void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED
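// EXAMPLE (editor's addition, illustrative only): the buddy metadata above is
// selected through the public API by creating a custom pool with
// VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT. 'allocator' and 'memoryTypeIndex' are
// assumed to exist (e.g. from vmaCreateAllocator / vmaFindMemoryTypeIndex);
// error handling is elided.
#if 0
VmaPoolCreateInfo poolCreateInfo = {};
poolCreateInfo.memoryTypeIndex = memoryTypeIndex;
poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
poolCreateInfo.blockSize = 128ull * 1024 * 1024; // buddy works best with power-of-2 block sizes

VmaPool pool = VK_NULL_HANDLE;
VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
#endif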
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}

void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // Hitting this assert means there is a memory leak: unreleased VmaAllocation objects.
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}
bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
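// EXAMPLE (editor's addition, illustrative only): the magic-value helpers above
// are compiled in only when the library is configured with a nonzero margin and
// corruption detection before including this header; corruption can then be
// checked on demand for HOST_VISIBLE | HOST_COHERENT memory types.
#if 0
#define VMA_DEBUG_MARGIN 16
#define VMA_DEBUG_DETECT_CORRUPTION 1
#include "vk_mem_alloc.h"

// Later at runtime - a non-success result (or a failed assert in debug
// builds) indicates an overwrite outside an allocation's bounds:
VkResult res = vmaCheckCorruption(allocator, UINT32_MAX /* all memory types */);
#endif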
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}

VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
static void InitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}

VmaPool_T::~VmaPool_T()
{
}

void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}

#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
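// EXAMPLE (editor's addition, illustrative only): SetName() above backs the
// public vmaSetPoolName(). The string is copied using the allocator's
// VkAllocationCallbacks and appears in vmaBuildStatsString() output.
// 'allocator' and 'pool' are assumed to exist.
#if 0
vmaSetPoolName(allocator, pool, "TexturePool");
#endif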
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}
VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}
bool VmaBlockVector::IsEmpty()
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    return m_Blocks.empty();
}

bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_Algorithm == 0 || m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;

VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // If linear algorithm is used, canMakeOtherLost is available only when used as ring buffer,
    // which in turn is available only when maxBlockCount = 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    /*
    Under certain condition, this whole section can be skipped for optimization, so
    we move on directly to trying to allocate with canMakeOtherLost. That is the case
    e.g. for custom pools with linear algorithm.
    */
    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                    createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                        createInfo.pUserData, suballocType, strategy, pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                        createInfo.pUserData, suballocType, strategy, pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
        }
        // 2. Try to create new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                    createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }
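    // EXAMPLE (editor's addition, illustrative only): the two loops above size
    // the first blocks of a default pool progressively. With m_PreferredBlockSize
    // = 256 MiB, no existing blocks, and a 1 MiB request, the first loop settles
    // on a 32 MiB block (three halvings, since 32 MiB >= 2 * 1 MiB); if that
    // vkAllocateMemory call failed, the second loop would retry with smaller
    // sizes, halving up to NEW_BLOCK_SIZE_SHIFT_MAX more times while the block
    // still fits the requested size.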
    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex, m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size, alignment, isUpperAddress, suballocType,
                        canMakeOtherLost, strategy, &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex, m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size, alignment, isUpperAddress, suballocType,
                        canMakeOtherLost, strategy, &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // Allocate from this block.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    UpdateHasEmptyBlock();
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment,
                        size,
                        m_MemoryTypeIndex,
                        suballocType,
                        mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations must have been touched while we are here. Next try.
            }
            else
            {
                // Could not find place in any of the blocks - break outer loop.
                break;
            }
        }
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            // Reached the limit of rollback-and-retry attempts, so this must be a real problem.
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
void VmaBlockVector::Free(const VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // We already have an empty block - we don't want two, so delete this one.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // else: we now have one empty block - leave it.
        }
        // pBlock didn't become empty, but we have another empty block - find and free that one.
        // (This is optional, heuristics.)
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
            }
        }

        UpdateHasEmptyBlock();
        IncrementallySortBlocks();
    }

    // Destruction of a free block, deferred until this point, outside of the mutex lock, for performance.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}

void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}
void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex, m_FrameInUseCount,
        m_BufferImageGranularity,
        size, alignment, isUpperAddress, suballocType,
        false, // canMakeOtherLost
        strategy, &currRequest))
    {
        // Allocate from pBlock.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        UpdateHasEmptyBlock();
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment,
            size,
            m_MemoryTypeIndex,
            suballocType,
            mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;

#if VMA_BUFFER_DEVICE_ADDRESS
    // Every standalone block can potentially contain a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT - always enable the feature.
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_hAllocator->m_UseKhrBufferDeviceAddress)
    {
        allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
        VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created. Create a new block for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
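// EXAMPLE (editor's addition, illustrative only): the VMA_BUFFER_DEVICE_ADDRESS
// branch above is taken when the allocator was created with the corresponding
// flag, so that every block can host buffers using device addresses. A sketch:
#if 0
VmaAllocatorCreateInfo allocatorInfo = {};
allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT;
// ... physicalDevice, device, instance members filled as usual ...
VmaAllocator allocator = VK_NULL_HANDLE;
vmaCreateAllocator(&allocatorInfo, &allocator);
#endif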
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do the actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // The place where the actual data copy happens.
            memcpy(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
    // Done regardless of pDefragCtx->res == VK_SUCCESS.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind a buffer for the whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to the command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Save buffers to the defragmentation context for later destruction.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}
void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                break;
            }
        }
    }
    UpdateHasEmptyBlock();
}
void VmaBlockVector::UpdateHasEmptyBlock()
{
    m_HasEmptyBlock = false;
    for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = true;
            break;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
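// EXAMPLE (editor's addition, illustrative only): the JSON produced above is
// what the public vmaBuildStatsString() returns when a detailed map is
// requested; the string can be saved to a file and visualized, e.g. with the
// VmaDumpVis tool shipped alongside the library.
#if 0
char* statsString = nullptr;
vmaBuildStatsString(allocator, &statsString, VK_TRUE /* detailedMap */);
// ... inspect or save statsString ...
vmaFreeStatsString(allocator, statsString);
#endif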
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
            {
                if(!m_Mutex.TryLockWrite())
                {
                    pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
                    return;
                }
            }
            else
            {
                m_Mutex.LockWrite();
                pCtx->mutexLocked = true;
            }
        }

        pCtx->Begin(overlappingMoveSupported, flags);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
        {
            if(m_hAllocator->m_UseMutex)
                m_Mutex.UnlockWrite();

            if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
                pCtx->res = VK_NOT_READY;

            return;
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
            }
        }
    }
}
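// EXAMPLE (editor's addition, illustrative only): this method is driven by the
// public defragmentation API. A minimal CPU-only pass, assuming 'allocations'
// is a std::vector<VmaAllocation> gathered by the application; buffers/images
// bound to moved allocations must be recreated and rebound afterwards.
#if 0
VmaDefragmentationInfo2 defragInfo = {};
defragInfo.allocationCount = (uint32_t)allocations.size();
defragInfo.pAllocations = allocations.data();
defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
// commandBuffer stays VK_NULL_HANDLE, so GPU-side moves are disabled.

VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
vmaDefragmentationBegin(allocator, &defragInfo, nullptr, &defragCtx);
vmaDefragmentationEnd(allocator, defragCtx);
#endif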
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    uint32_t flags,
    VmaDefragmentationStats* pStats)
{
    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL && m_hAllocator->m_UseMutex)
    {
        VMA_ASSERT(pCtx->mutexLocked == false);

        // Incremental defragmentation doesn't hold the lock, so when we enter here
        // no lock is protecting us. Since we mutate state here, we have to take the lock now.
        m_Mutex.LockWrite();
        pCtx->mutexLocked = true;
    }

    // If the mutex isn't locked we didn't do any work and there is nothing to delete.
    if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
    {
        // Destroy buffers.
        for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
        {
            VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
            if(blockCtx.hBuffer)
            {
                (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            FreeEmptyBlocks(pStats);
        }
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
uint32_t VmaBlockVector::ProcessDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    const uint32_t moveCount = std::min(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);

    for(uint32_t i = 0; i < moveCount; ++ i)
    {
        VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];

        pMove->allocation = move.hAllocation;
        pMove->memory = move.pDstBlock->GetDeviceMemory();
        pMove->offset = move.dstOffset;

        ++ pMove;
    }

    pCtx->defragmentationMovesProcessed += moveCount;

    return moveCount;
}
void VmaBlockVector::CommitDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationStats* pStats)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
    {
        const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];

        move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
        move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
    }

    pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
    FreeEmptyBlocks(pStats);
}
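// EXAMPLE (editor's addition, illustrative only, based on the incremental
// defragmentation API as the editor understands it): ProcessDefragmentations()
// and CommitDefragmentations() above serve vmaBeginDefragmentationPass() /
// vmaEndDefragmentationPass(). 'defragCtx' is assumed to come from
// vmaDefragmentationBegin() with VMA_DEFRAGMENTATION_FLAG_INCREMENTAL set.
#if 0
std::vector<VmaDefragmentationPassMoveInfo> moves(64);
VmaDefragmentationPassInfo passInfo = {};
passInfo.moveCount = (uint32_t)moves.size();
passInfo.pMoves = moves.data();

vmaBeginDefragmentationPass(allocator, defragCtx, &passInfo);
// Copy data for each returned move (e.g. via vkCmdCopyBuffer), then:
vmaEndDefragmentationPass(allocator, defragCtx);
#endif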
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}

bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
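// EXAMPLE (editor's addition, illustrative only): lost allocations depend on
// the application advancing the frame index and "touching" allocations it
// still uses each frame. 'allocator', 'allocation' and 'frameIndex' are assumed
// to exist.
#if 0
vmaSetCurrentFrameIndex(allocator, frameIndex);
if(vmaTouchAllocation(allocator, allocation) == VK_FALSE)
{
    // The allocation became lost - its memory was reclaimed. Recreate it.
}
#endif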
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}

VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now, as we are inside VmaBlockVector::m_Mutex, we can make a final check whether this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    bool freeOldAllocations)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    size_t srcBlockMinIndex = 0;

    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find a new place for this allocation in a preceding or the current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size, alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT, // strategy
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move = {};
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                move.hAllocation = allocInfo.m_hAllocation;
                move.pSrcBlock = pSrcBlockInfo->m_pBlock;
                move.pDstBlock = pDstBlockInfo->m_pBlock;

                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);

                if(freeOldAllocations)
                {
                    pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
                    allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
                }

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pBlockInfo->m_Allocations for the next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks by the main criterion, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // Execute defragmentation rounds (the main part).
    const uint32_t roundCount = 2;
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove, !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL));
    }

    return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}

VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
14212 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
14213 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14214 VkDeviceSize maxBytesToMove,
14215 uint32_t maxAllocationsToMove,
14218 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
14220 const size_t blockCount = m_pBlockVector->GetBlockCount();
14221 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
14226 PreprocessMetadata();
14230 m_BlockInfos.resize(blockCount);
14231 for(
size_t i = 0; i < blockCount; ++i)
14233 m_BlockInfos[i].origBlockIndex = i;
14236 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
14237 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
14238 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
14243 FreeSpaceDatabase freeSpaceDb;
14245 size_t dstBlockInfoIndex = 0;
14246 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14247 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14248 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14249 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
14250 VkDeviceSize dstOffset = 0;
14253 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
14255 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
14256 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
14257 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
14258 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
14259 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
14261 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
14262 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            VmaDefragmentationMove move = {};
            // Try to place it in one of free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block: move the allocation inside the block, to a lower offset.
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
                // Different block: move the allocation into the fetched free space.
                else
                {
                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register remaining free space at the end of dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block.
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If destination and source place overlap, skip the move if it would
                        // shift the allocation by less than 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;

                        move.srcBlockIndex = srcOrigBlockIndex;
                        move.dstBlockIndex = dstOrigBlockIndex;
                        move.srcOffset = srcAllocOffset;
                        move.dstOffset = dstAllocOffset;
                        move.size = srcAllocSize;

                        moves.push_back(move);
                    }
                }
                // MOVE OPTION 2: Move the allocation to a different block.
                else
                {
                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = dstOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
        }
    }

    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
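/*
The loop above never copies any memory itself: every relocation it decides on is
recorded as a VmaDefragmentationMove (source/destination block indices and offsets
plus size) and the actual CPU memcpy or GPU vkCmdCopyBuffer happens later, when the
caller processes the returned move list. A minimal standalone model of this
"compact to the front and remember the moves" idea (simplified types, not the
library's real data structures; alignment and budgets ignored):

\code
#include <cstdint>
#include <vector>

struct Alloc { uint64_t offset, size; };
struct Move  { uint64_t srcOffset, dstOffset, size; };

// Slides allocations (sorted by offset) toward offset 0 and records each move.
static std::vector<Move> CompactFront(std::vector<Alloc>& allocs)
{
    std::vector<Move> moves;
    uint64_t dst = 0;
    for(Alloc& a : allocs)
    {
        if(a.offset != dst)
        {
            moves.push_back({a.offset, dst, a.size});
            a.offset = dst;
        }
        dst += a.size;
    }
    return moves;
}
\endcode

The real algorithm additionally honors per-allocation alignment, per-pass
byte/count budgets, and the 1/64-of-size threshold for overlapping moves seen above.
*/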
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
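/*
PreprocessMetadata() strips all FREE suballocations from each block's list before
compaction, using the same save-next-then-erase idiom that appears in Defragment()
above: erasing through a list iterator invalidates only that iterator, so the
successor must be remembered first. The idiom in miniature:

\code
#include <list>

static void EraseEvens(std::list<int>& values)
{
    for(std::list<int>::iterator it = values.begin(); it != values.end(); )
    {
        if(*it % 2 == 0)
        {
            std::list<int>::iterator nextIt = it;
            ++nextIt;            // Remember the successor first.
            values.erase(it);    // Invalidates only 'it'.
            it = nextIt;
        }
        else
        {
            ++it;
        }
    }
}
\endcode
*/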
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block: the entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            // pMetadata->m_SumFreeSize is already set to blockSize.
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
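/*
PostprocessMetadata() rebuilds what PreprocessMetadata() discarded: it walks the
now-compacted, offset-sorted allocations of each block and re-inserts a FREE
suballocation for every gap (before, between, and after allocations), updating
m_FreeCount and m_SumFreeSize and re-sorting m_FreeSuballocationsBySize. The gap
computation itself reduces to this (a hypothetical standalone helper, simplified
from the logic above):

\code
#include <cstdint>
#include <vector>

struct Range { uint64_t offset, size; };

// Given ranges sorted by offset inside [0, blockSize), return the free gaps.
static std::vector<Range> FreeGaps(const std::vector<Range>& used, uint64_t blockSize)
{
    std::vector<Range> gaps;
    uint64_t cursor = 0;
    for(const Range& r : used)
    {
        if(r.offset > cursor)
            gaps.push_back({cursor, r.offset - cursor});
        cursor = r.offset + r.size;
    }
    if(cursor < blockSize)
        gaps.push_back({cursor, blockSize - cursor});
    return gaps;
}
\endcode
*/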
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
    const VmaSuballocation& suballoc)
{
    // Linear search for the first element with offset >= suballoc.offset.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end() &&
        it->offset < suballoc.offset)
    {
        ++it;
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
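/*
InsertSuballoc() finds the insertion point with a linear scan, which is O(n) per
insertion but is the natural choice on list-like storage. For comparison, on a
random-access container the same sorted insert could use binary search (a sketch
under that assumption, not the library's code):

\code
#include <algorithm>
#include <cstdint>
#include <vector>

struct Suballoc { uint64_t offset, size; };

static void InsertSorted(std::vector<Suballoc>& v, const Suballoc& s)
{
    // O(log n) search + O(n) element shift; profitable when lookups dominate.
    std::vector<Suballoc>::iterator it = std::lower_bound(
        v.begin(), v.end(), s,
        [](const Suballoc& a, const Suballoc& b) { return a.offset < b.offset; });
    v.insert(it, s);
}
\endcode
*/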
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool, // Optional.
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
    defragmentationMovesProcessed(0),
    defragmentationMovesCommitted(0),
    hasDefragmentationPlan(0),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}
VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}

void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    // The "Fast" algorithm is supported only when VMA_DEBUG_MARGIN is 0, all
    // allocations in this block vector are movable, no buffer/image granularity
    // conflict is possible, and the defragmentation is not incremental.
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
        !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL))
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}
VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, const VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with an algorithm other than the default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator, pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    const VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentation contexts. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // Dedicated allocations cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // Lost allocations cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with an algorithm other than the default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator, hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to a default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VK_NULL_HANDLE, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
    {
        // For incremental defragmentation, just earmark how much can be moved;
        // the real work happens in the defragmentation passes.
        m_MaxCpuBytesToMove = maxCpuBytesToMove;
        m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;

        m_MaxGpuBytesToMove = maxGpuBytesToMove;
        m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;

        if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
            m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
            return VK_SUCCESS;

        return VK_NOT_READY;
    }

    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats, flags,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats, flags,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}

VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo)
{
    VmaDefragmentationPassMoveInfo* pCurrentMove = pInfo->pMoves;
    uint32_t movesLeft = pInfo->moveCount;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                pBlockVectorCtx->GetBlockVector()->Defragment(
                    pBlockVectorCtx,
                    m_pStats, m_Flags,
                    m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                    m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                    VK_NULL_HANDLE);

                if(pBlockVectorCtx->res < VK_SUCCESS)
                    continue;

                pBlockVectorCtx->hasDefragmentationPlan = true;
            }

            const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
                pBlockVectorCtx,
                pCurrentMove, movesLeft);

            movesLeft -= processed;
            pCurrentMove += processed;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                m_pStats, m_Flags,
                m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                VK_NULL_HANDLE);

            if(pBlockVectorCtx->res < VK_SUCCESS)
                continue;

            pBlockVectorCtx->hasDefragmentationPlan = true;
        }

        const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
            pBlockVectorCtx,
            pCurrentMove, movesLeft);

        movesLeft -= processed;
        pCurrentMove += processed;
    }

    pInfo->moveCount = pInfo->moveCount - movesLeft;

    return VK_SUCCESS;
}
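/*
DefragmentPassBegin() and DefragmentPassEnd() back the incremental defragmentation
API: each pass hands the caller up to pInfo->moveCount move descriptors to execute,
and the end call commits them, returning VK_NOT_READY while more passes remain. A
sketch of the intended calling pattern, assuming 'allocator' and 'defragCtx' were
obtained from vmaCreateAllocator() and vmaDefragmentationBegin() (error handling
omitted):

\code
VmaDefragmentationPassMoveInfo moves[64];
for(;;)
{
    VmaDefragmentationPassInfo passInfo = {};
    passInfo.moveCount = 64;
    passInfo.pMoves = moves;
    vmaBeginDefragmentationPass(allocator, defragCtx, &passInfo);
    if(passInfo.moveCount == 0)
        break; // Nothing left to move.
    // Execute the returned moves, e.g. record one vkCmdCopyBuffer per entry,
    // submit the command buffer, and wait for the GPU.
    if(vmaEndDefragmentationPass(allocator, defragCtx) == VK_SUCCESS)
        break; // All moves committed; defragmentation complete.
}
\endcode
*/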
VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
{
    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                res = VK_NOT_READY;
                continue;
            }

            pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
                pBlockVectorCtx, m_pStats);

            if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
                res = VK_NOT_READY;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            res = VK_NOT_READY;
            continue;
        }

        pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
            pBlockVectorCtx, m_pStats);

        if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
            res = VK_NOT_READY;
    }

    return res;
}
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_RecordingStartTime(std::chrono::high_resolution_clock::now())
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

#if defined(_WIN32)
    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
#else
    // Open file for writing.
    m_File = fopen(settings.pFilePath, "wb");
    if(m_File == 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
#endif

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,8");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}

void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}

void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    PrintPointerList(info.poolCount, info.pPools);
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
    Flush();
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}

void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
    Flush();
}
VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            // pUserData is an opaque pointer, not a string: print its value instead.
            snprintf(m_PtrStr, 17, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled,
    bool deviceCoherentMemoryExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
#if defined(_WIN32)
    outParams.threadId = GetCurrentThreadId();
#else
    // Convert std::this_thread::get_id() to uint32_t through its textual form.
    // There is room for optimization, since stringstream is quite slow.
    std::thread::id thread_id = std::this_thread::get_id();
    std::stringstream thread_id_to_string_converter;
    thread_id_to_string_converter << thread_id;
    std::string thread_id_as_string = thread_id_to_string_converter.str();
    outParams.threadId = static_cast<uint32_t>(std::stoi(thread_id_as_string.c_str()));
#endif

    auto current_time = std::chrono::high_resolution_clock::now();

    outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
}
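/*
On non-Windows platforms the code above round-trips std::this_thread::get_id()
through a stringstream and std::stoi, which is slow and can throw if the textual
id does not fit in an int. A hedged alternative (not what the library does) is to
hash the id, which is portable and cannot overflow:

\code
#include <cstdint>
#include <functional>
#include <thread>

static uint32_t ThreadIdAsU32()
{
    const std::size_t h = std::hash<std::thread::id>()(std::this_thread::get_id());
    // Stable within a single run; sufficient for correlating log entries.
    return static_cast<uint32_t>(h);
}
\endcode
*/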
void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}

void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
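/*
VmaAllocationObjectAllocator is a thin thread-safe facade over the internal pool
allocator (constructed above with 1024 items per block), so VmaAllocation_T
objects are recycled from large chunks instead of hitting the heap one by one.
The pattern in miniature (a simplified sketch, not the library's implementation):

\code
#include <mutex>
#include <new>
#include <utility>
#include <vector>

template<typename T>
class TinyObjectPool
{
public:
    template<typename... Args> T* Alloc(Args&&... args)
    {
        std::lock_guard<std::mutex> lock(m_Mutex);
        if(m_Free.empty())
        {
            // Grab a new chunk of raw storage for 1024 objects.
            void* chunk = ::operator new(sizeof(T) * 1024);
            m_Chunks.push_back(chunk);
            T* items = static_cast<T*>(chunk);
            for(size_t i = 0; i < 1024; ++i)
                m_Free.push_back(items + i);
        }
        T* p = m_Free.back();
        m_Free.pop_back();
        return new(p) T(std::forward<Args>(args)...); // Placement-construct in place.
    }

    void Free(T* p)
    {
        std::lock_guard<std::mutex> lock(m_Mutex);
        p->~T();
        m_Free.push_back(p); // Recycle the slot.
    }

private:
    std::mutex m_Mutex;
    std::vector<void*> m_Chunks; // Simplification: never released here.
    std::vector<T*> m_Free;
};
\endcode
*/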
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
    m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
    m_UseAmdDeviceCoherentMemory((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT) != 0),
    m_UseKhrBufferDeviceAddress((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0),
    m_GlobalMemoryTypeBits(UINT32_MAX)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        // The *KHR extension flags are unnecessary on Vulkan 1.1+; the core entry points are used.
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Needs to be a multiple of uint32_t size because VMA_CORRUPTION_DETECTION_MAGIC_VALUE is written there.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device && pCreateInfo->instance);

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if !(VMA_BUFFER_DEVICE_ADDRESS)
    if(m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
    }
#endif
#if VMA_VULKAN_VERSION < 1002000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pUserData = pCreateInfo->pDeviceMemoryCallbacks->pUserData;
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
    m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0,
            SIZE_MAX,
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call m_pBlockVectors[memTypeIndex]->CreateMinBlocks here, because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}

VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget,
            m_UseAmdDeviceCoherentMemory);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
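/*
The pHeapSizeLimit handling in the constructor above lets an application present a
smaller VkMemoryHeap to VMA, e.g. to simulate a 256 MiB card on a development
machine. Sketch of the caller side (heap index 0 chosen for illustration; the
physicalDevice/device/instance handles are assumed to exist, and the real heap
layout should be queried first):

\code
VkDeviceSize heapLimits[VK_MAX_MEMORY_HEAPS];
for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    heapLimits[i] = VK_WHOLE_SIZE;            // VK_WHOLE_SIZE = leave heap untouched.
heapLimits[0] = 256ull * 1024 * 1024;         // Clamp heap 0 to 256 MiB.

VmaAllocatorCreateInfo allocatorInfo = {};
allocatorInfo.physicalDevice = physicalDevice;
allocatorInfo.device = device;
allocatorInfo.instance = instance;
allocatorInfo.pHeapSizeLimit = heapLimits;    // Must cover all heaps when non-null.
// vmaCreateAllocator(&allocatorInfo, &allocator);
\endcode
*/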
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Static();
#endif

    if(pVulkanFunctions != VMA_NULL)
    {
        ImportVulkanFunctions_Custom(pVulkanFunctions);
    }

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Dynamic();
#endif

    ValidateVulkanFunctions();
}
#if VMA_STATIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Static()
{
    // Vulkan 1.0
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;

    // Vulkan 1.1
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
        m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
        m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
    }
#endif
}

#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions)
{
    VMA_ASSERT(pVulkanFunctions != VMA_NULL);

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
    VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
    VMA_COPY_IF_NOT_NULL(vkFreeMemory);
    VMA_COPY_IF_NOT_NULL(vkMapMemory);
    VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
    VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
    VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
    VMA_COPY_IF_NOT_NULL(vkCreateImage);
    VMA_COPY_IF_NOT_NULL(vkDestroyImage);
    VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif

#if VMA_MEMORY_BUDGET
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif

#undef VMA_COPY_IF_NOT_NULL
}
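/*
ImportVulkanFunctions_Custom() copies only the pointers the user actually filled
in, so VmaVulkanFunctions can be populated partially and combined with the static
or dynamic import paths for the rest. A sketch of supplying the whole Vulkan 1.0
set from statically linked prototypes (assumes the loader prototypes are visible
in the translation unit):

\code
VmaVulkanFunctions vulkanFunctions = {};
vulkanFunctions.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
vulkanFunctions.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;
vulkanFunctions.vkAllocateMemory = vkAllocateMemory;
vulkanFunctions.vkFreeMemory = vkFreeMemory;
vulkanFunctions.vkMapMemory = vkMapMemory;
vulkanFunctions.vkUnmapMemory = vkUnmapMemory;
vulkanFunctions.vkFlushMappedMemoryRanges = vkFlushMappedMemoryRanges;
vulkanFunctions.vkInvalidateMappedMemoryRanges = vkInvalidateMappedMemoryRanges;
vulkanFunctions.vkBindBufferMemory = vkBindBufferMemory;
vulkanFunctions.vkBindImageMemory = vkBindImageMemory;
vulkanFunctions.vkGetBufferMemoryRequirements = vkGetBufferMemoryRequirements;
vulkanFunctions.vkGetImageMemoryRequirements = vkGetImageMemoryRequirements;
vulkanFunctions.vkCreateBuffer = vkCreateBuffer;
vulkanFunctions.vkDestroyBuffer = vkDestroyBuffer;
vulkanFunctions.vkCreateImage = vkCreateImage;
vulkanFunctions.vkDestroyImage = vkDestroyImage;
vulkanFunctions.vkCmdCopyBuffer = vkCmdCopyBuffer;

VmaAllocatorCreateInfo allocatorInfo = {};
allocatorInfo.pVulkanFunctions = &vulkanFunctions;
// ...fill the remaining members, then call vmaCreateAllocator(&allocatorInfo, &allocator).
\endcode
*/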
#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
{
#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);

    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
    VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
    VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
    VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
    VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
    VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
    VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
    VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
    VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
    VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
    VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
    VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");

#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2");
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif

#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
    }
#endif

#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
    }
#endif // #if VMA_BIND_MEMORY2

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif // #if VMA_MEMORY_BUDGET

#undef VMA_FETCH_DEVICE_FUNC
#undef VMA_FETCH_INSTANCE_FUNC
}

#endif // #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
void VmaAllocator_T::ValidateVulkanFunctions()
{
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif

#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
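/*
CalcPreferredBlockSize() picks the default VkDeviceMemory block size per memory
type: 1/8 of the heap for "small" heaps (those not larger than
VMA_SMALL_HEAP_MAX_SIZE, 1 GiB by default), otherwise
m_PreferredLargeHeapBlockSize (VmaAllocatorCreateInfo::preferredLargeHeapBlockSize,
defaulting to VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE = 256 MiB), rounded up to a
multiple of 32 bytes. Worked example: a 512 MiB heap counts as small, so its
blocks come out as 512 MiB / 8 = 64 MiB; an 8 GiB heap keeps the 256 MiB default.
*/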
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristic: allocate dedicated memory if the requested size is greater than half of the preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        return AllocateDedicatedMemory(
            size, suballocType, memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer, dedicatedBufferUsage, dedicatedImage,
            allocationCount, pAllocations);
    }
    else
    {
        VkResult res = blockVector->Allocate(
            m_CurrentFrameIndex.load(),
            size, alignment, finalCreateInfo, suballocType,
            allocationCount, pAllocations);
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Block allocation failed; try dedicated memory unless forbidden.
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }

        res = AllocateDedicatedMemory(
            size, suballocType, memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer, dedicatedBufferUsage, dedicatedImage,
            allocationCount, pAllocations);
        if(res == VK_SUCCESS)
        {
            // Succeeded: AllocateDedicatedMemory already filled pAllocations.
            VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
            return VK_SUCCESS;
        }
        else
        {
            // Everything failed: return error code.
            VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
            return res;
        }
    }
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

#if VMA_BUFFER_DEVICE_ADDRESS
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_UseKhrBufferDeviceAddress)
    {
        bool canContainBufferWithDeviceAddress = true;
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX || // Usage flags unknown.
                (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = false;
        }
        if(canContainBufferWithDeviceAddress)
        {
            allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
            VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
        }
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size, suballocType, memTypeIndex, allocInfo,
            map, isUserDataString, pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register them in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            // No need to unmap first: vkFreeMemory implicitly unmaps any mapped ranges.
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
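/*
AllocateDedicatedMemory() builds the VkMemoryAllocateInfo::pNext chain dynamically:
VkMemoryDedicatedAllocateInfo(KHR) ties the allocation to one buffer or image, and
VkMemoryAllocateFlagsInfo(KHR) adds DEVICE_ADDRESS when the buffer may use buffer
device address. The same chain written by hand in raw Vulkan (the
buffer-device-address part omitted; 'device', 'buffer', 'memReq', and
'memTypeIndex' are assumed to exist):

\code
VkMemoryDedicatedAllocateInfo dedicatedInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO };
dedicatedInfo.buffer = buffer;            // The buffer this memory is dedicated to.

VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
allocInfo.pNext = &dedicatedInfo;         // Chain the extension struct.
allocInfo.allocationSize = memReq.size;
allocInfo.memoryTypeIndex = memTypeIndex;

VkDeviceMemory memory = VK_NULL_HANDLE;
VkResult res = vkAllocateMemory(device, &allocInfo, nullptr, &memory);
\endcode

VmaPnextChainPushFront() does the same chaining generically, inserting each struct
at the head of the list so multiple extension structs can coexist.
*/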
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
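/*
When 'map' is requested, the page is mapped once here and the allocation keeps the
pointer for its whole lifetime (persistently mapped memory). From the public API
this corresponds to VMA_ALLOCATION_CREATE_MAPPED_BIT, e.g. (assuming 'allocator'
already exists):

\code
VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufCreateInfo.size = 65536;
bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

VkBuffer buf;
VmaAllocation alloc;
VmaAllocationInfo allocInfo;
vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
// allocInfo.pMappedData stays valid until the allocation is destroyed.
\endcode
*/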
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
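/*
Both Get*MemoryRequirements functions above use the *2 query so the driver can
report, via VkMemoryDedicatedRequirements, whether the resource requires or merely
prefers its own VkDeviceMemory block. The equivalent raw-Vulkan query for a buffer
('device' and 'buffer' assumed to exist):

\code
VkMemoryDedicatedRequirements dedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS };

VkMemoryRequirements2 memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 };
memReq2.pNext = &dedicatedReq;

VkBufferMemoryRequirementsInfo2 info = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2 };
info.buffer = buffer;

vkGetBufferMemoryRequirements2(device, &info, &memReq2);

const bool requiresDedicated = dedicatedReq.requiresDedicatedAllocation != VK_FALSE;
const bool prefersDedicated = dedicatedReq.prefersDedicatedAllocation != VK_FALSE;
// memReq2.memoryRequirements holds size/alignment/memoryTypeBits as usual.
\endcode
*/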
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfoForPool,
            suballocType,
            allocationCount,
            pAllocations);
    }
    else
    {
        // Bit mask of memory Vulkan types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer,
                dedicatedBufferUsage,
                dedicatedImage,
                createInfo,
                memTypeIndex,
                suballocType,
                allocationCount,
                pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed. Try other compatible memory types.
            else
            {
                for(;;)
                {
                    // Remove old memTypeIndex from list of possibilities.
                    memoryTypeBits &= ~(1u << memTypeIndex);
                    // Find alternative memTypeIndex.
                    res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                    if(res == VK_SUCCESS)
                    {
                        alignmentForMemType = VMA_MAX(
                            vkMemReq.alignment,
                            GetMemoryTypeMinAlignment(memTypeIndex));

                        res = AllocateMemoryOfType(
                            vkMemReq.size,
                            alignmentForMemType,
                            requiresDedicatedAllocation || prefersDedicatedAllocation,
                            dedicatedBuffer,
                            dedicatedBufferUsage,
                            dedicatedImage,
                            createInfo,
                            memTypeIndex,
                            suballocType,
                            allocationCount,
                            pAllocations);
                        // Allocation from this alternative memory type succeeded.
                        if(res == VK_SUCCESS)
                        {
                            return res;
                        }
                        // else: Allocation from this memory type failed. Try next one - next loop iteration.
                    }
                    // No other matching memory type index could be found.
                    else
                    {
                        // No way to recover from a failure.
                        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                }
            }
        }
        else
        {
            return res;
        }
    }
}
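/*
Note on the fallback loop above (illustrative sketch, not part of the library):
every failed attempt clears the failing type's bit in memoryTypeBits, so
vmaFindMemoryTypeIndex can return each memory type at most once and the loop
must terminate. Assuming a hypothetical device with 3 memory types:

    uint32_t candidates = 0x7;   // types 0, 1, 2 acceptable
    candidates &= ~(1u << 1);    // type 1 failed -> 0x5 remains
    // Each retry strictly removes one bit, so at most 3 attempts happen.
*/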
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            // Do this regardless of whether the allocation is lost. Lost allocations still account to Budget.AllocationBytes.
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}
VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and so it does nothing. It's left for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // Have to take MIN with heap size because explicit HeapSizeLimit is included in it.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
        }
    }
}
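/*
Usage sketch (illustrative, not part of the library): reading the budget this
function fills in. VmaBudget::usage and ::budget are the values to compare when
deciding whether a new allocation is likely to fit. Assumes existing handles
'allocator', 'heapCount', and a candidate size 'newAllocSize'.

    VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
    vmaGetBudget(allocator, budgets);
    for(uint32_t h = 0; h < heapCount; ++h)
    {
        if(budgets[h].usage + newAllocSize > budgets[h].budget)
        {
            // Heap h is close to its budget; prefer another heap or free memory first.
        }
    }
*/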
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats, info.flags);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}
VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}

VkResult VmaAllocator_T::DefragmentationPassBegin(
    VmaDefragmentationPassInfo* pInfo,
    VmaDefragmentationContext context)
{
    return context->DefragmentPassBegin(pInfo);
}

VkResult VmaAllocator_T::DefragmentationPassEnd(
    VmaDefragmentationContext context)
{
    return context->DefragmentPassEnd();
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        /*
        Warning: This is a carefully designed algorithm.
        Do not modify unless you really know what you're doing :)
        */
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}
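/*
Usage sketch (illustrative, not part of the library): the typical per-frame
pattern for allocations created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT.
Assumes existing handles 'allocator', 'alloc' and a counter 'frameIndex'.

    vmaSetCurrentFrameIndex(allocator, frameIndex);
    if(!vmaTouchAllocation(allocator, alloc))
    {
        // The allocation is lost - recreate the resource and its allocation.
    }
*/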
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    // Memory type index out of range or forbidden.
    if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() ||
        ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}
void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData);
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
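/*
Usage sketch (illustrative, not part of the library): mapping counts are
tracked per allocation, so Map/Unmap calls must stay balanced. Assumes existing
handles 'allocator' and 'alloc' in HOST_VISIBLE memory, and caller-provided
'srcData'/'srcSize'.

    void* pData = VMA_NULL;
    if(vmaMapMemory(allocator, alloc, &pData) == VK_SUCCESS)
    {
        memcpy(pData, srcData, srcSize);
        vmaUnmapMemory(allocator, alloc);
    }
*/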
VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    VkResult res = VK_SUCCESS;

    VkMappedMemoryRange memRange = {};
    if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
    {
        switch(op)
        {
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
    return res;
}
VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
    uint32_t allocationCount,
    const VmaAllocation* allocations,
    const VkDeviceSize* offsets, const VkDeviceSize* sizes,
    VMA_CACHE_OPERATION op)
{
    typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
    typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
    RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));

    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation alloc = allocations[allocIndex];
        const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
        const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
        VkMappedMemoryRange newRange;
        if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
        {
            ranges.push_back(newRange);
        }
    }

    VkResult res = VK_SUCCESS;
    if(!ranges.empty())
    {
        switch(op)
        {
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
            break;
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
    return res;
}
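/*
Usage sketch (illustrative, not part of the library): flushing several
allocations with a single Vulkan call. Passing null offsets/sizes means
"whole allocations", as handled above. 'allocA' and 'allocB' are assumed
existing allocations.

    VmaAllocation allocs[2] = { allocA, allocB };
    vmaFlushAllocations(allocator, 2, allocs, VMA_NULL, VMA_NULL);
*/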
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create buffer.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // Query for supported memory types.
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // Destroy buffer.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}
uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
{
    // Make sure memory information is already fetched.
    VMA_ASSERT(GetMemoryTypeCount() > 0);

    uint32_t memoryTypeBits = UINT32_MAX;

    if(!m_UseAmdDeviceCoherentMemory)
    {
        // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD.
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
            {
                memoryTypeBits &= ~(1u << memTypeIndex);
            }
        }
    }

    return memoryTypeBits;
}
bool VmaAllocator_T::GetFlushOrInvalidateRange(
    VmaAllocation allocation,
    VkDeviceSize offset, VkDeviceSize size,
    VkMappedMemoryRange& outRange) const
{
    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        const VkDeviceSize allocationSize = allocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
        outRange.pNext = VMA_NULL;
        outRange.memory = allocation->GetMemory();

        switch(allocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                outRange.size = allocationSize - outRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                outRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
                    allocationSize - outRange.offset);
            }
            break;
        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
            {
                // 1. Still within this allocation.
                outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
                if(size == VK_WHOLE_SIZE)
                {
                    size = allocationSize - offset;
                }
                else
                {
                    VMA_ASSERT(offset + size <= allocationSize);
                }
                outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);

                // 2. Adjust to whole block.
                const VkDeviceSize allocationOffset = allocation->GetOffset();
                VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
                const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
                outRange.offset += allocationOffset;
                outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
                break;
            }
        default:
            VMA_ASSERT(0);
        }
        return true;
    }
    return false;
}
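/*
Worked example of the rounding above (illustrative): with
nonCoherentAtomSize = 64, offset = 100, size = 200:

    outRange.offset = VmaAlignDown(100, 64) = 64
    outRange.size   = VmaAlignUp(200 + (100 - 64), 64) = VmaAlignUp(236, 64) = 256

so the resulting range [64, 320) covers the requested [100, 300) and is aligned
at both ends, as vkFlushMappedMemoryRanges requires for non-coherent memory.
*/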
#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    VmaPnextChainPushFront(&memProps, &budgetProps);

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();

            // Some bugged drivers return the budget incorrectly, e.g. 0 or much bigger than heap size.
            if(m_Budget.m_VulkanBudget[heapIndex] == 0)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
            }
            else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
            }
            if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
            {
                m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
            }
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
//////////////////////////////////////////////////////////////////////////////////
// Public interface

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo)
{
    VMA_ASSERT(allocator && pAllocatorInfo);
    pAllocatorInfo->instance = allocator->m_hInstance;
    pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice();
    pAllocatorInfo->device = allocator->m_hDevice;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
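/*
Usage sketch (illustrative, not part of the library): polling the budget once
per frame together with vmaSetCurrentFrameIndex keeps the cached
VK_EXT_memory_budget data fresh (otherwise the cache above is refreshed only
after 30 allocator operations). Assumes existing 'allocator' and 'frameIndex'.

    vmaSetCurrentFrameIndex(allocator, frameIndex);
    VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
    vmaGetBudget(allocator, budgets);
*/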
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
                    {
                        json.WriteString("PROTECTED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_UNCACHED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
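/*
Usage sketch (illustrative, not part of the library): dumping the JSON produced
above. The string must be released with vmaFreeStatsString, since it is
allocated from the allocator's own callbacks. Assumes an existing 'allocator'.

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE);
    // Write or log statsString as needed, e.g. to a file.
    vmaFreeStatsString(allocator, statsString);
*/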
/*
This function is not protected by any mutex because it just reads immutable data.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    // Avoid DEVICE_COHERENT unless explicitly requested.
    if(((pAllocationCreateInfo->requiredFlags | pAllocationCreateInfo->preferredFlags) &
        (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
    {
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Calculate cost as number of bits from preferredFlags not present in this memory type.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
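/*
Usage sketch (illustrative, not part of the library): querying a memory type
index up front, e.g. to fill VmaPoolCreateInfo::memoryTypeIndex. Assumes an
existing 'allocator'.

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndex(
        allocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex); // UINT32_MAX: all types allowed
*/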
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}
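/*
Usage sketch (illustrative, not part of the library): creating a custom pool
for a specific memory type. Assumes 'allocator' and a 'memTypeIndex' obtained
e.g. from vmaFindMemoryTypeIndex.

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 64ull * 1024 * 1024; // optional: fixed 64 MiB blocks
    VmaPool pool = VK_NULL_HANDLE;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
    // Allocate with VmaAllocationCreateInfo::pool = pool, then eventually:
    // vmaDestroyPool(allocator, pool);
*/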
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool && ppName);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(
    VmaAllocator allocator,
    uint32_t allocationCount,
    const VmaAllocation* allocations,
    const VkDeviceSize* offsets,
    const VkDeviceSize* sizes)
{
    VMA_ASSERT(allocator);

    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocations);

    VMA_DEBUG_LOG("vmaFlushAllocations");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        // TODO: Recording of this function is not implemented.
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(
    VmaAllocator allocator,
    uint32_t allocationCount,
    const VmaAllocation* allocations,
    const VkDeviceSize* offsets,
    const VkDeviceSize* sizes)
{
    VMA_ASSERT(allocator);

    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocations);

    VMA_DEBUG_LOG("vmaInvalidateAllocations");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        // TODO: Recording of this function is not implemented.
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    const VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using new one.

    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // info2.flags, maxGpuAllocationsToMove, maxGpuBytesToMove, commandBuffer deliberately left zeros.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    // Degenerate case: Nothing to defragment.
    if(pInfo->allocationCount == 0 && pInfo->poolCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(pInfo->allocationCount == 0 || pInfo->pAllocations != VMA_NULL);
    VMA_ASSERT(pInfo->poolCount == 0 || pInfo->pPools != VMA_NULL);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}
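/*
Usage sketch (illustrative, not part of the library): CPU-side defragmentation
over a set of existing allocations. After the call, allocations flagged in
allocationsChanged[] have new memory, so their resources must be recreated and
rebound by the caller. Assumes existing arrays 'allocs'/'allocationsChanged'
of length 'allocCount'.

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = (uint32_t)allocCount;
    defragInfo.pAllocations = allocs;
    defragInfo.pAllocationsChanged = allocationsChanged;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
    vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
    vmaDefragmentationEnd(allocator, defragCtx);
*/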
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context,
    VmaDefragmentationPassInfo* pInfo)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(pInfo);

    VMA_DEBUG_LOG("vmaBeginDefragmentationPass");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        pInfo->moveCount = 0;
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassBegin(pInfo, context);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaEndDefragmentationPass");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassEnd(context);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
        !allocator->m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            pBufferCreateInfo->usage, // dedicatedBufferUsage
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
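/*
Usage sketch (illustrative, not part of the library): the canonical buffer +
allocation creation that the function above implements in one call. Assumes an
existing 'allocator'.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buf = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, VMA_NULL);
    // ... use buf, then eventually:
    // vmaDestroyBuffer(allocator, buf, alloc);
*/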
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            UINT32_MAX, // dedicatedBufferUsage
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
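/*
Usage sketch (illustrative, not part of the library): image creation mirrors
vmaCreateBuffer; the tiling chosen here decides the suballocation type used
above. Assumes an existing 'allocator'.

    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage img = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, VMA_NULL);
*/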
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
#endif // #ifdef VMA_IMPLEMENTATION