23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
2028 #ifndef VMA_RECORDING_ENABLED
2029 #define VMA_RECORDING_ENABLED 0
2032 #if !defined(NOMINMAX) && defined(VMA_IMPLEMENTATION)
2036 #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
2037 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
2038 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
2039 extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
2040 extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
2041 extern PFN_vkAllocateMemory vkAllocateMemory;
2042 extern PFN_vkFreeMemory vkFreeMemory;
2043 extern PFN_vkMapMemory vkMapMemory;
2044 extern PFN_vkUnmapMemory vkUnmapMemory;
2045 extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
2046 extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
2047 extern PFN_vkBindBufferMemory vkBindBufferMemory;
2048 extern PFN_vkBindImageMemory vkBindImageMemory;
2049 extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
2050 extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
2051 extern PFN_vkCreateBuffer vkCreateBuffer;
2052 extern PFN_vkDestroyBuffer vkDestroyBuffer;
2053 extern PFN_vkCreateImage vkCreateImage;
2054 extern PFN_vkDestroyImage vkDestroyImage;
2055 extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
2056 #if VMA_VULKAN_VERSION >= 1001000
2057 extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
2058 extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
2059 extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
2060 extern PFN_vkBindImageMemory2 vkBindImageMemory2;
2061 extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
2066 #include <vulkan/vulkan.h>
2072 #if !defined(VMA_VULKAN_VERSION)
2073 #if defined(VK_VERSION_1_2)
2074 #define VMA_VULKAN_VERSION 1002000
2075 #elif defined(VK_VERSION_1_1)
2076 #define VMA_VULKAN_VERSION 1001000
2078 #define VMA_VULKAN_VERSION 1000000
2082 #if !defined(VMA_DEDICATED_ALLOCATION)
2083 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
2084 #define VMA_DEDICATED_ALLOCATION 1
2086 #define VMA_DEDICATED_ALLOCATION 0
2090 #if !defined(VMA_BIND_MEMORY2)
2091 #if VK_KHR_bind_memory2
2092 #define VMA_BIND_MEMORY2 1
2094 #define VMA_BIND_MEMORY2 0
2098 #if !defined(VMA_MEMORY_BUDGET)
2099 #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
2100 #define VMA_MEMORY_BUDGET 1
2102 #define VMA_MEMORY_BUDGET 0
2107 #if !defined(VMA_BUFFER_DEVICE_ADDRESS)
2108 #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
2109 #define VMA_BUFFER_DEVICE_ADDRESS 1
2111 #define VMA_BUFFER_DEVICE_ADDRESS 0
2116 #if !defined(VMA_MEMORY_PRIORITY)
2117 #if VK_EXT_memory_priority
2118 #define VMA_MEMORY_PRIORITY 1
2120 #define VMA_MEMORY_PRIORITY 0
2129 #ifndef VMA_CALL_PRE
2130 #define VMA_CALL_PRE
2132 #ifndef VMA_CALL_POST
2133 #define VMA_CALL_POST
2147 #ifndef VMA_LEN_IF_NOT_NULL
2148 #define VMA_LEN_IF_NOT_NULL(len)
2153 #ifndef VMA_NULLABLE
2155 #define VMA_NULLABLE _Nullable
2157 #define VMA_NULLABLE
2163 #ifndef VMA_NOT_NULL
2165 #define VMA_NOT_NULL _Nonnull
2167 #define VMA_NOT_NULL
2173 #ifndef VMA_NOT_NULL_NON_DISPATCHABLE
2174 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2175 #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
2177 #define VMA_NOT_NULL_NON_DISPATCHABLE
2181 #ifndef VMA_NULLABLE_NON_DISPATCHABLE
2182 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2183 #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
2185 #define VMA_NULLABLE_NON_DISPATCHABLE
2203 uint32_t memoryType,
2204 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2206 void* VMA_NULLABLE pUserData);
2210 uint32_t memoryType,
2211 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2213 void* VMA_NULLABLE pUserData);
2370 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
2371 PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
2372 PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
2374 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
2375 PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
2376 PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
2378 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
2379 PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
2469 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(
"VkPhysicalDeviceMemoryProperties::memoryHeapCount")
pHeapSizeLimit;
2542 const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);
2550 const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);
2560 uint32_t memoryTypeIndex,
2561 VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);
2573 uint32_t frameIndex);
2669 #ifndef VMA_STATS_STRING_ENABLED
2670 #define VMA_STATS_STRING_ENABLED 1
2673 #if VMA_STATS_STRING_ENABLED
2680 char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
2681 VkBool32 detailedMap);
2685 char* VMA_NULLABLE pStatsString);
2946 uint32_t memoryTypeBits,
2948 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2964 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
2966 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2982 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
2984 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
3141 VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);
3169 size_t* VMA_NULLABLE pLostAllocationCount);
3196 const char* VMA_NULLABLE * VMA_NOT_NULL ppName);
3206 const char* VMA_NULLABLE pName);
3300 const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
3326 const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
3328 size_t allocationCount,
3329 VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3330 VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);
3340 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3348 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3373 size_t allocationCount,
3374 const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);
3431 void* VMA_NULLABLE pUserData);
3488 void* VMA_NULLABLE * VMA_NOT_NULL ppData);
3526 VkDeviceSize offset,
3553 VkDeviceSize offset,
3572 uint32_t allocationCount,
3573 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3574 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3575 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3593 uint32_t allocationCount,
3594 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3595 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3596 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3675 const VmaPool VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(poolCount)
pPools;
3709 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE
memory;
3847 const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3848 size_t allocationCount,
3849 VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
3868 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);
3883 VkDeviceSize allocationLocalOffset,
3884 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3885 const void* VMA_NULLABLE pNext);
3902 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);
3917 VkDeviceSize allocationLocalOffset,
3918 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3919 const void* VMA_NULLABLE pNext);
3953 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
3955 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
3972 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
3978 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
3980 VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
3997 VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
4007 #if defined(__cplusplus) && defined(__INTELLISENSE__)
4008 #define VMA_IMPLEMENTATION
4011 #ifdef VMA_IMPLEMENTATION
4012 #undef VMA_IMPLEMENTATION
4019 #if VMA_RECORDING_ENABLED
4022 #include <windows.h>
4042 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
4043 #define VMA_STATIC_VULKAN_FUNCTIONS 1
4052 #if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
4053 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
4054 #if defined(VK_NO_PROTOTYPES)
4055 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
4056 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
4069 #if VMA_USE_STL_CONTAINERS
4070 #define VMA_USE_STL_VECTOR 1
4071 #define VMA_USE_STL_UNORDERED_MAP 1
4072 #define VMA_USE_STL_LIST 1
4075 #ifndef VMA_USE_STL_SHARED_MUTEX
4077 #if __cplusplus >= 201703L
4078 #define VMA_USE_STL_SHARED_MUTEX 1
4082 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
4083 #define VMA_USE_STL_SHARED_MUTEX 1
4085 #define VMA_USE_STL_SHARED_MUTEX 0
4093 #if VMA_USE_STL_VECTOR
4097 #if VMA_USE_STL_UNORDERED_MAP
4098 #include <unordered_map>
4101 #if VMA_USE_STL_LIST
4110 #include <algorithm>
4115 #define VMA_NULL nullptr
4118 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
4120 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4123 if(alignment <
sizeof(
void*))
4125 alignment =
sizeof(
void*);
4128 return memalign(alignment, size);
4130 #elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
4133 #if defined(__APPLE__)
4134 #include <AvailabilityMacros.h>
4137 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4139 #if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0))
4140 #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0
4147 if (__builtin_available(macOS 10.15, iOS 13, *))
4148 return aligned_alloc(alignment, size);
4152 if(alignment <
sizeof(
void*))
4154 alignment =
sizeof(
void*);
4158 if(posix_memalign(&pointer, alignment, size) == 0)
4162 #elif defined(_WIN32)
4163 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4165 return _aligned_malloc(size, alignment);
4168 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4170 return aligned_alloc(alignment, size);
4175 static void vma_aligned_free(
void* ptr)
4180 static void vma_aligned_free(
void* VMA_NULLABLE ptr)
4194 #define VMA_ASSERT(expr)
4196 #define VMA_ASSERT(expr) assert(expr)
4202 #ifndef VMA_HEAVY_ASSERT
4204 #define VMA_HEAVY_ASSERT(expr)
4206 #define VMA_HEAVY_ASSERT(expr)
4210 #ifndef VMA_ALIGN_OF
4211 #define VMA_ALIGN_OF(type) (__alignof(type))
4214 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
4215 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
4218 #ifndef VMA_SYSTEM_ALIGNED_FREE
4220 #if defined(VMA_SYSTEM_FREE)
4221 #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr)
4223 #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr)
4228 #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
4232 #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
4236 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
4240 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
4243 #ifndef VMA_DEBUG_LOG
4244 #define VMA_DEBUG_LOG(format, ...)
4254 #if VMA_STATS_STRING_ENABLED
4255 static inline void VmaUint32ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint32_t num)
4257 snprintf(outStr, strLen,
"%u",
static_cast<unsigned int>(num));
4259 static inline void VmaUint64ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint64_t num)
4261 snprintf(outStr, strLen,
"%llu",
static_cast<unsigned long long>(num));
4263 static inline void VmaPtrToStr(
char* VMA_NOT_NULL outStr,
size_t strLen,
const void* ptr)
4265 snprintf(outStr, strLen,
"%p", ptr);
4273 void Lock() { m_Mutex.lock(); }
4274 void Unlock() { m_Mutex.unlock(); }
4275 bool TryLock() {
return m_Mutex.try_lock(); }
4279 #define VMA_MUTEX VmaMutex
4283 #ifndef VMA_RW_MUTEX
4284 #if VMA_USE_STL_SHARED_MUTEX
4286 #include <shared_mutex>
4290 void LockRead() { m_Mutex.lock_shared(); }
4291 void UnlockRead() { m_Mutex.unlock_shared(); }
4292 bool TryLockRead() {
return m_Mutex.try_lock_shared(); }
4293 void LockWrite() { m_Mutex.lock(); }
4294 void UnlockWrite() { m_Mutex.unlock(); }
4295 bool TryLockWrite() {
return m_Mutex.try_lock(); }
4297 std::shared_mutex m_Mutex;
4299 #define VMA_RW_MUTEX VmaRWMutex
4300 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
4306 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
4307 void LockRead() { AcquireSRWLockShared(&m_Lock); }
4308 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
4309 bool TryLockRead() {
return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
4310 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
4311 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
4312 bool TryLockWrite() {
return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
4316 #define VMA_RW_MUTEX VmaRWMutex
4322 void LockRead() { m_Mutex.Lock(); }
4323 void UnlockRead() { m_Mutex.Unlock(); }
4324 bool TryLockRead() {
return m_Mutex.TryLock(); }
4325 void LockWrite() { m_Mutex.Lock(); }
4326 void UnlockWrite() { m_Mutex.Unlock(); }
4327 bool TryLockWrite() {
return m_Mutex.TryLock(); }
4331 #define VMA_RW_MUTEX VmaRWMutex
4338 #ifndef VMA_ATOMIC_UINT32
4340 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
4343 #ifndef VMA_ATOMIC_UINT64
4345 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
4348 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
4353 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
4356 #ifndef VMA_MIN_ALIGNMENT
4361 #ifdef VMA_DEBUG_ALIGNMENT
4362 #define VMA_MIN_ALIGNMENT VMA_DEBUG_ALIGNMENT
4364 #define VMA_MIN_ALIGNMENT (1)
4368 #ifndef VMA_DEBUG_MARGIN
4373 #define VMA_DEBUG_MARGIN (0)
4376 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
4381 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
4384 #ifndef VMA_DEBUG_DETECT_CORRUPTION
4390 #define VMA_DEBUG_DETECT_CORRUPTION (0)
4393 #ifndef VMA_DEBUG_GLOBAL_MUTEX
4398 #define VMA_DEBUG_GLOBAL_MUTEX (0)
4401 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
4406 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
4409 #ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
4414 #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0)
4417 #ifndef VMA_SMALL_HEAP_MAX_SIZE
4419 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
4422 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
4424 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
4427 #ifndef VMA_CLASS_NO_COPY
4428 #define VMA_CLASS_NO_COPY(className) \
4430 className(const className&) = delete; \
4431 className& operator=(const className&) = delete;
4434 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
4437 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
4439 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
4440 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
4448 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
4449 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
4450 static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
4452 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
4454 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
4455 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
4458 static inline uint32_t VmaCountBitsSet(uint32_t v)
4460 uint32_t c = v - ((v >> 1) & 0x55555555);
4461 c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
4462 c = ((c >> 4) + c) & 0x0F0F0F0F;
4463 c = ((c >> 8) + c) & 0x00FF00FF;
4464 c = ((c >> 16) + c) & 0x0000FFFF;
4473 template <
typename T>
4474 inline bool VmaIsPow2(T x)
4476 return (x & (x-1)) == 0;
4481 template <
typename T>
4482 static inline T VmaAlignUp(T val, T alignment)
4484 VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
4485 return (val + alignment - 1) & ~(alignment - 1);
4489 template <
typename T>
4490 static inline T VmaAlignDown(T val, T alignment)
4492 VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
4493 return val & ~(alignment - 1);
4497 template <
typename T>
4498 static inline T VmaRoundDiv(T x, T y)
4500 return (x + (y / (T)2)) / y;
4504 static inline uint32_t VmaNextPow2(uint32_t v)
4515 static inline uint64_t VmaNextPow2(uint64_t v)
4529 static inline uint32_t VmaPrevPow2(uint32_t v)
4539 static inline uint64_t VmaPrevPow2(uint64_t v)
4551 static inline bool VmaStrIsEmpty(
const char* pStr)
4553 return pStr == VMA_NULL || *pStr ==
'\0';
4556 #if VMA_STATS_STRING_ENABLED
4558 static const char* VmaAlgorithmToStr(uint32_t algorithm)
4578 template<
typename Iterator,
typename Compare>
4579 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
4581 Iterator centerValue = end; --centerValue;
4582 Iterator insertIndex = beg;
4583 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
4585 if(cmp(*memTypeIndex, *centerValue))
4587 if(insertIndex != memTypeIndex)
4589 VMA_SWAP(*memTypeIndex, *insertIndex);
4594 if(insertIndex != centerValue)
4596 VMA_SWAP(*insertIndex, *centerValue);
4601 template<
typename Iterator,
typename Compare>
4602 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
4606 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
4607 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
4608 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
4612 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
4623 static inline bool VmaBlocksOnSamePage(
4624 VkDeviceSize resourceAOffset,
4625 VkDeviceSize resourceASize,
4626 VkDeviceSize resourceBOffset,
4627 VkDeviceSize pageSize)
4629 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
4630 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
4631 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
4632 VkDeviceSize resourceBStart = resourceBOffset;
4633 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
4634 return resourceAEndPage == resourceBStartPage;
4637 enum VmaSuballocationType
4639 VMA_SUBALLOCATION_TYPE_FREE = 0,
4640 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
4641 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
4642 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
4643 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
4644 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
4645 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
4654 static inline bool VmaIsBufferImageGranularityConflict(
4655 VmaSuballocationType suballocType1,
4656 VmaSuballocationType suballocType2)
4658 if(suballocType1 > suballocType2)
4660 VMA_SWAP(suballocType1, suballocType2);
4663 switch(suballocType1)
4665 case VMA_SUBALLOCATION_TYPE_FREE:
4667 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
4669 case VMA_SUBALLOCATION_TYPE_BUFFER:
4671 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4672 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4673 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
4675 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4676 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
4677 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4678 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
4680 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4681 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
4689 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
4691 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4692 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
4693 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4694 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
4696 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
4703 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
4705 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4706 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
4707 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4708 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
4710 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
4723 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
4725 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
4726 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4727 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4728 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
4734 VMA_CLASS_NO_COPY(VmaMutexLock)
4736 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
4737 m_pMutex(useMutex ? &mutex : VMA_NULL)
4738 {
if(m_pMutex) { m_pMutex->Lock(); } }
4740 {
if(m_pMutex) { m_pMutex->Unlock(); } }
4742 VMA_MUTEX* m_pMutex;
4746 struct VmaMutexLockRead
4748 VMA_CLASS_NO_COPY(VmaMutexLockRead)
4750 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
4751 m_pMutex(useMutex ? &mutex : VMA_NULL)
4752 {
if(m_pMutex) { m_pMutex->LockRead(); } }
4753 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
4755 VMA_RW_MUTEX* m_pMutex;
4759 struct VmaMutexLockWrite
4761 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
4763 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
4764 m_pMutex(useMutex ? &mutex : VMA_NULL)
4765 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
4766 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
4768 VMA_RW_MUTEX* m_pMutex;
4771 #if VMA_DEBUG_GLOBAL_MUTEX
4772 static VMA_MUTEX gDebugGlobalMutex;
4773 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
4775 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
4779 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
4790 template <
typename CmpLess,
typename IterT,
typename KeyT>
4791 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key,
const CmpLess& cmp)
4793 size_t down = 0, up = (end - beg);
4796 const size_t mid = down + (up - down) / 2;
4797 if(cmp(*(beg+mid), key))
4809 template<
typename CmpLess,
typename IterT,
typename KeyT>
4810 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
4812 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4813 beg, end, value, cmp);
4815 (!cmp(*it, value) && !cmp(value, *it)))
4827 template<
typename T>
4828 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
4830 for(uint32_t i = 0; i < count; ++i)
4832 const T iPtr = arr[i];
4833 if(iPtr == VMA_NULL)
4837 for(uint32_t j = i + 1; j < count; ++j)
4848 template<
typename MainT,
typename NewT>
4849 static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
4851 newStruct->pNext = mainStruct->pNext;
4852 mainStruct->pNext = newStruct;
4858 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
4860 void* result = VMA_NULL;
4861 if((pAllocationCallbacks != VMA_NULL) &&
4862 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
4864 result = (*pAllocationCallbacks->pfnAllocation)(
4865 pAllocationCallbacks->pUserData,
4868 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
4872 result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
4874 VMA_ASSERT(result != VMA_NULL &&
"CPU memory allocation failed.");
4878 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
4880 if((pAllocationCallbacks != VMA_NULL) &&
4881 (pAllocationCallbacks->pfnFree != VMA_NULL))
4883 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
4887 VMA_SYSTEM_ALIGNED_FREE(ptr);
4891 template<
typename T>
4892 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
4894 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
4897 template<
typename T>
4898 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
4900 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
4903 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
4905 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
4907 template<
typename T>
4908 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
4911 VmaFree(pAllocationCallbacks, ptr);
4914 template<
typename T>
4915 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
4919 for(
size_t i = count; i--; )
4923 VmaFree(pAllocationCallbacks, ptr);
4927 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr)
4929 if(srcStr != VMA_NULL)
4931 const size_t len = strlen(srcStr);
4932 char*
const result = vma_new_array(allocs,
char, len + 1);
4933 memcpy(result, srcStr, len + 1);
4942 static void VmaFreeString(
const VkAllocationCallbacks* allocs,
char* str)
4946 const size_t len = strlen(str);
4947 vma_delete_array(allocs, str, len + 1);
4952 template<
typename T>
4953 class VmaStlAllocator
4956 const VkAllocationCallbacks*
const m_pCallbacks;
4957 typedef T value_type;
4959 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
4960 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
4962 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
4963 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
4965 template<
typename U>
4966 bool operator==(
const VmaStlAllocator<U>& rhs)
const
4968 return m_pCallbacks == rhs.m_pCallbacks;
4970 template<
typename U>
4971 bool operator!=(
const VmaStlAllocator<U>& rhs)
const
4973 return m_pCallbacks != rhs.m_pCallbacks;
4976 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
4977 VmaStlAllocator(
const VmaStlAllocator&) =
default;
4980 #if VMA_USE_STL_VECTOR
4982 #define VmaVector std::vector
4984 template<
typename T,
typename allocatorT>
4985 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
4987 vec.insert(vec.begin() + index, item);
4990 template<
typename T,
typename allocatorT>
4991 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
4993 vec.erase(vec.begin() + index);
5001 template<
typename T,
typename AllocatorT>
5005 typedef T value_type;
5007 VmaVector(
const AllocatorT& allocator) :
5008 m_Allocator(allocator),
5015 VmaVector(
size_t count,
const AllocatorT& allocator) :
5016 m_Allocator(allocator),
5017 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
5025 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator)
5026 : VmaVector(count, allocator) {}
5028 VmaVector(
const VmaVector<T, AllocatorT>& src) :
5029 m_Allocator(src.m_Allocator),
5030 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
5031 m_Count(src.m_Count),
5032 m_Capacity(src.m_Count)
5036 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
5042 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5045 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
5049 resize(rhs.m_Count);
5052 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
5058 bool empty()
const {
return m_Count == 0; }
5059 size_t size()
const {
return m_Count; }
5060 T* data() {
return m_pArray; }
5061 const T* data()
const {
return m_pArray; }
5063 T& operator[](
size_t index)
5065 VMA_HEAVY_ASSERT(index < m_Count);
5066 return m_pArray[index];
5068 const T& operator[](
size_t index)
const
5070 VMA_HEAVY_ASSERT(index < m_Count);
5071 return m_pArray[index];
5076 VMA_HEAVY_ASSERT(m_Count > 0);
5079 const T& front()
const
5081 VMA_HEAVY_ASSERT(m_Count > 0);
5086 VMA_HEAVY_ASSERT(m_Count > 0);
5087 return m_pArray[m_Count - 1];
5089 const T& back()
const
5091 VMA_HEAVY_ASSERT(m_Count > 0);
5092 return m_pArray[m_Count - 1];
5095 void reserve(
size_t newCapacity,
bool freeMemory =
false)
5097 newCapacity = VMA_MAX(newCapacity, m_Count);
5099 if((newCapacity < m_Capacity) && !freeMemory)
5101 newCapacity = m_Capacity;
5104 if(newCapacity != m_Capacity)
5106 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
5109 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5111 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5112 m_Capacity = newCapacity;
5113 m_pArray = newArray;
5117 void resize(
size_t newCount)
5119 size_t newCapacity = m_Capacity;
5120 if(newCount > m_Capacity)
5122 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
5125 if(newCapacity != m_Capacity)
5127 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
5128 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
5129 if(elementsToCopy != 0)
5131 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
5133 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5134 m_Capacity = newCapacity;
5135 m_pArray = newArray;
5146 void shrink_to_fit()
5148 if(m_Capacity > m_Count)
5150 T* newArray = VMA_NULL;
5153 newArray = VmaAllocateArray<T>(m_Allocator.m_pCallbacks, m_Count);
5154 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5156 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5157 m_Capacity = m_Count;
5158 m_pArray = newArray;
5162 void insert(
size_t index,
const T& src)
5164 VMA_HEAVY_ASSERT(index <= m_Count);
5165 const size_t oldCount = size();
5166 resize(oldCount + 1);
5167 if(index < oldCount)
5169 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
5171 m_pArray[index] = src;
5174 void remove(
size_t index)
5176 VMA_HEAVY_ASSERT(index < m_Count);
5177 const size_t oldCount = size();
5178 if(index < oldCount - 1)
5180 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
5182 resize(oldCount - 1);
5185 void push_back(
const T& src)
5187 const size_t newIndex = size();
5188 resize(newIndex + 1);
5189 m_pArray[newIndex] = src;
5194 VMA_HEAVY_ASSERT(m_Count > 0);
5198 void push_front(
const T& src)
5205 VMA_HEAVY_ASSERT(m_Count > 0);
5209 typedef T* iterator;
5211 iterator begin() {
return m_pArray; }
5212 iterator end() {
return m_pArray + m_Count; }
5215 AllocatorT m_Allocator;
5221 template<
typename T,
typename allocatorT>
5222 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
5224 vec.insert(index, item);
5227 template<
typename T,
typename allocatorT>
5228 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
5235 template<
typename CmpLess,
typename VectorT>
5236 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
5238 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
5240 vector.data() + vector.size(),
5242 CmpLess()) - vector.data();
5243 VmaVectorInsert(vector, indexToInsert, value);
5244 return indexToInsert;
5247 template<
typename CmpLess,
typename VectorT>
5248 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
5251 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
5256 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
5258 size_t indexToRemove = it - vector.begin();
5259 VmaVectorRemove(vector, indexToRemove);
5276 template<
typename T,
typename AllocatorT,
size_t N>
5277 class VmaSmallVector
5280 typedef T value_type;
5282 VmaSmallVector(
const AllocatorT& allocator) :
5284 m_DynamicArray(allocator)
5287 VmaSmallVector(
size_t count,
const AllocatorT& allocator) :
5289 m_DynamicArray(count > N ? count : 0, allocator)
5292 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5293 VmaSmallVector(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) =
delete;
5294 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5295 VmaSmallVector<T, AllocatorT, N>& operator=(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) =
delete;
5297 bool empty()
const {
return m_Count == 0; }
5298 size_t size()
const {
return m_Count; }
5299 T* data() {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5300 const T* data()
const {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5302 T& operator[](
size_t index)
5304 VMA_HEAVY_ASSERT(index < m_Count);
5305 return data()[index];
5307 const T& operator[](
size_t index)
const
5309 VMA_HEAVY_ASSERT(index < m_Count);
5310 return data()[index];
5315 VMA_HEAVY_ASSERT(m_Count > 0);
5318 const T& front()
const
5320 VMA_HEAVY_ASSERT(m_Count > 0);
5325 VMA_HEAVY_ASSERT(m_Count > 0);
5326 return data()[m_Count - 1];
5328 const T& back()
const
5330 VMA_HEAVY_ASSERT(m_Count > 0);
5331 return data()[m_Count - 1];
5334 void resize(
size_t newCount,
bool freeMemory =
false)
5336 if(newCount > N && m_Count > N)
5339 m_DynamicArray.resize(newCount);
5342 m_DynamicArray.shrink_to_fit();
5345 else if(newCount > N && m_Count <= N)
5348 m_DynamicArray.resize(newCount);
5351 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count *
sizeof(T));
5354 else if(newCount <= N && m_Count > N)
5359 memcpy(m_StaticArray, m_DynamicArray.data(), newCount *
sizeof(T));
5361 m_DynamicArray.resize(0);
5364 m_DynamicArray.shrink_to_fit();
5374 void clear(
bool freeMemory =
false)
5376 m_DynamicArray.clear();
5379 m_DynamicArray.shrink_to_fit();
5384 void insert(
size_t index,
const T& src)
5386 VMA_HEAVY_ASSERT(index <= m_Count);
5387 const size_t oldCount = size();
5388 resize(oldCount + 1);
5389 T*
const dataPtr = data();
5390 if(index < oldCount)
5393 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) *
sizeof(T));
5395 dataPtr[index] = src;
5398 void remove(
size_t index)
5400 VMA_HEAVY_ASSERT(index < m_Count);
5401 const size_t oldCount = size();
5402 if(index < oldCount - 1)
5405 T*
const dataPtr = data();
5406 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) *
sizeof(T));
5408 resize(oldCount - 1);
5411 void push_back(
const T& src)
5413 const size_t newIndex = size();
5414 resize(newIndex + 1);
5415 data()[newIndex] = src;
5420 VMA_HEAVY_ASSERT(m_Count > 0);
5424 void push_front(
const T& src)
5431 VMA_HEAVY_ASSERT(m_Count > 0);
5435 typedef T* iterator;
5437 iterator begin() {
return data(); }
5438 iterator end() {
return data() + m_Count; }
5443 VmaVector<T, AllocatorT> m_DynamicArray;
5454 template<
typename T>
5455 class VmaPoolAllocator
5457 VMA_CLASS_NO_COPY(VmaPoolAllocator)
5459 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
5460 ~VmaPoolAllocator();
5461 template<
typename... Types> T* Alloc(Types... args);
5467 uint32_t NextFreeIndex;
5468 alignas(T)
char Value[
sizeof(T)];
5475 uint32_t FirstFreeIndex;
5478 const VkAllocationCallbacks* m_pAllocationCallbacks;
5479 const uint32_t m_FirstBlockCapacity;
5480 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
5482 ItemBlock& CreateNewBlock();
5485 template<
typename T>
5486 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
5487 m_pAllocationCallbacks(pAllocationCallbacks),
5488 m_FirstBlockCapacity(firstBlockCapacity),
5489 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
5491 VMA_ASSERT(m_FirstBlockCapacity > 1);
5494 template<
typename T>
5495 VmaPoolAllocator<T>::~VmaPoolAllocator()
5497 for(
size_t i = m_ItemBlocks.size(); i--; )
5498 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
5499 m_ItemBlocks.clear();
5502 template<
typename T>
5503 template<
typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
5505 for(
size_t i = m_ItemBlocks.size(); i--; )
5507 ItemBlock& block = m_ItemBlocks[i];
5509 if(block.FirstFreeIndex != UINT32_MAX)
5511 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
5512 block.FirstFreeIndex = pItem->NextFreeIndex;
5513 T* result = (T*)&pItem->Value;
5514 new(result)T(std::forward<Types>(args)...);
5520 ItemBlock& newBlock = CreateNewBlock();
5521 Item*
const pItem = &newBlock.pItems[0];
5522 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
5523 T* result = (T*)&pItem->Value;
5524 new(result)T(std::forward<Types>(args)...);
5528 template<
typename T>
5529 void VmaPoolAllocator<T>::Free(T* ptr)
5532 for(
size_t i = m_ItemBlocks.size(); i--; )
5534 ItemBlock& block = m_ItemBlocks[i];
5538 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
5541 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
5544 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
5545 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
5546 block.FirstFreeIndex = index;
5550 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
5553 template<
typename T>
5554 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
5556 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
5557 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
5559 const ItemBlock newBlock = {
5560 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
5564 m_ItemBlocks.push_back(newBlock);
5567 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
5568 newBlock.pItems[i].NextFreeIndex = i + 1;
5569 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
5570 return m_ItemBlocks.back();
5576 #if VMA_USE_STL_LIST
5578 #define VmaList std::list
5582 template<
typename T>
5591 template<
typename T>
5594 VMA_CLASS_NO_COPY(VmaRawList)
5596 typedef VmaListItem<T> ItemType;
5598 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
5602 size_t GetCount()
const {
return m_Count; }
5603 bool IsEmpty()
const {
return m_Count == 0; }
5605 ItemType* Front() {
return m_pFront; }
5606 const ItemType* Front()
const {
return m_pFront; }
5607 ItemType* Back() {
return m_pBack; }
5608 const ItemType* Back()
const {
return m_pBack; }
5610 ItemType* PushBack();
5611 ItemType* PushFront();
5612 ItemType* PushBack(
const T& value);
5613 ItemType* PushFront(
const T& value);
5618 ItemType* InsertBefore(ItemType* pItem);
5620 ItemType* InsertAfter(ItemType* pItem);
5622 ItemType* InsertBefore(ItemType* pItem,
const T& value);
5623 ItemType* InsertAfter(ItemType* pItem,
const T& value);
5625 void Remove(ItemType* pItem);
5628 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
5629 VmaPoolAllocator<ItemType> m_ItemAllocator;
5635 template<
typename T>
5636 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
5637 m_pAllocationCallbacks(pAllocationCallbacks),
5638 m_ItemAllocator(pAllocationCallbacks, 128),
5645 template<
typename T>
5646 VmaRawList<T>::~VmaRawList()
5652 template<
typename T>
5653 void VmaRawList<T>::Clear()
5655 if(IsEmpty() ==
false)
5657 ItemType* pItem = m_pBack;
5658 while(pItem != VMA_NULL)
5660 ItemType*
const pPrevItem = pItem->pPrev;
5661 m_ItemAllocator.Free(pItem);
5664 m_pFront = VMA_NULL;
5670 template<
typename T>
5671 VmaListItem<T>* VmaRawList<T>::PushBack()
5673 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5674 pNewItem->pNext = VMA_NULL;
5677 pNewItem->pPrev = VMA_NULL;
5678 m_pFront = pNewItem;
5684 pNewItem->pPrev = m_pBack;
5685 m_pBack->pNext = pNewItem;
5692 template<
typename T>
5693 VmaListItem<T>* VmaRawList<T>::PushFront()
5695 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5696 pNewItem->pPrev = VMA_NULL;
5699 pNewItem->pNext = VMA_NULL;
5700 m_pFront = pNewItem;
5706 pNewItem->pNext = m_pFront;
5707 m_pFront->pPrev = pNewItem;
5708 m_pFront = pNewItem;
5714 template<
typename T>
5715 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
5717 ItemType*
const pNewItem = PushBack();
5718 pNewItem->Value = value;
5722 template<
typename T>
5723 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
5725 ItemType*
const pNewItem = PushFront();
5726 pNewItem->Value = value;
5730 template<
typename T>
5731 void VmaRawList<T>::PopBack()
5733 VMA_HEAVY_ASSERT(m_Count > 0);
5734 ItemType*
const pBackItem = m_pBack;
5735 ItemType*
const pPrevItem = pBackItem->pPrev;
5736 if(pPrevItem != VMA_NULL)
5738 pPrevItem->pNext = VMA_NULL;
5740 m_pBack = pPrevItem;
5741 m_ItemAllocator.Free(pBackItem);
5745 template<
typename T>
5746 void VmaRawList<T>::PopFront()
5748 VMA_HEAVY_ASSERT(m_Count > 0);
5749 ItemType*
const pFrontItem = m_pFront;
5750 ItemType*
const pNextItem = pFrontItem->pNext;
5751 if(pNextItem != VMA_NULL)
5753 pNextItem->pPrev = VMA_NULL;
5755 m_pFront = pNextItem;
5756 m_ItemAllocator.Free(pFrontItem);
5760 template<
typename T>
5761 void VmaRawList<T>::Remove(ItemType* pItem)
5763 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
5764 VMA_HEAVY_ASSERT(m_Count > 0);
5766 if(pItem->pPrev != VMA_NULL)
5768 pItem->pPrev->pNext = pItem->pNext;
5772 VMA_HEAVY_ASSERT(m_pFront == pItem);
5773 m_pFront = pItem->pNext;
5776 if(pItem->pNext != VMA_NULL)
5778 pItem->pNext->pPrev = pItem->pPrev;
5782 VMA_HEAVY_ASSERT(m_pBack == pItem);
5783 m_pBack = pItem->pPrev;
5786 m_ItemAllocator.Free(pItem);
5790 template<
typename T>
5791 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
5793 if(pItem != VMA_NULL)
5795 ItemType*
const prevItem = pItem->pPrev;
5796 ItemType*
const newItem = m_ItemAllocator.Alloc();
5797 newItem->pPrev = prevItem;
5798 newItem->pNext = pItem;
5799 pItem->pPrev = newItem;
5800 if(prevItem != VMA_NULL)
5802 prevItem->pNext = newItem;
5806 VMA_HEAVY_ASSERT(m_pFront == pItem);
5816 template<
typename T>
5817 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
5819 if(pItem != VMA_NULL)
5821 ItemType*
const nextItem = pItem->pNext;
5822 ItemType*
const newItem = m_ItemAllocator.Alloc();
5823 newItem->pNext = nextItem;
5824 newItem->pPrev = pItem;
5825 pItem->pNext = newItem;
5826 if(nextItem != VMA_NULL)
5828 nextItem->pPrev = newItem;
5832 VMA_HEAVY_ASSERT(m_pBack == pItem);
5842 template<
typename T>
5843 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
5845 ItemType*
const newItem = InsertBefore(pItem);
5846 newItem->Value = value;
5850 template<
typename T>
5851 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
5853 ItemType*
const newItem = InsertAfter(pItem);
5854 newItem->Value = value;
5858 template<
typename T,
typename AllocatorT>
5861 VMA_CLASS_NO_COPY(VmaList)
5872 T& operator*()
const
5874 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5875 return m_pItem->Value;
5877 T* operator->()
const
5879 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5880 return &m_pItem->Value;
5883 iterator& operator++()
5885 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5886 m_pItem = m_pItem->pNext;
5889 iterator& operator--()
5891 if(m_pItem != VMA_NULL)
5893 m_pItem = m_pItem->pPrev;
5897 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5898 m_pItem = m_pList->Back();
5903 iterator operator++(
int)
5905 iterator result = *
this;
5909 iterator operator--(
int)
5911 iterator result = *
this;
5916 bool operator==(
const iterator& rhs)
const
5918 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5919 return m_pItem == rhs.m_pItem;
5921 bool operator!=(
const iterator& rhs)
const
5923 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5924 return m_pItem != rhs.m_pItem;
5928 VmaRawList<T>* m_pList;
5929 VmaListItem<T>* m_pItem;
5931 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
5937 friend class VmaList<T, AllocatorT>;
5940 class const_iterator
5949 const_iterator(
const iterator& src) :
5950 m_pList(src.m_pList),
5951 m_pItem(src.m_pItem)
5955 const T& operator*()
const
5957 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5958 return m_pItem->Value;
5960 const T* operator->()
const
5962 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5963 return &m_pItem->Value;
5966 const_iterator& operator++()
5968 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5969 m_pItem = m_pItem->pNext;
5972 const_iterator& operator--()
5974 if(m_pItem != VMA_NULL)
5976 m_pItem = m_pItem->pPrev;
5980 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5981 m_pItem = m_pList->Back();
5986 const_iterator operator++(
int)
5988 const_iterator result = *
this;
5992 const_iterator operator--(
int)
5994 const_iterator result = *
this;
5999 bool operator==(
const const_iterator& rhs)
const
6001 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
6002 return m_pItem == rhs.m_pItem;
6004 bool operator!=(
const const_iterator& rhs)
const
6006 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
6007 return m_pItem != rhs.m_pItem;
6011 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
6017 const VmaRawList<T>* m_pList;
6018 const VmaListItem<T>* m_pItem;
6020 friend class VmaList<T, AllocatorT>;
6023 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
6025 bool empty()
const {
return m_RawList.IsEmpty(); }
6026 size_t size()
const {
return m_RawList.GetCount(); }
6028 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
6029 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
6031 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
6032 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
6034 void clear() { m_RawList.Clear(); }
6035 void push_back(
const T& value) { m_RawList.PushBack(value); }
6036 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
6037 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
6040 VmaRawList<T> m_RawList;
6059 template<
typename ItemTypeTraits>
6060 class VmaIntrusiveLinkedList
6063 typedef typename ItemTypeTraits::ItemType ItemType;
6064 static ItemType* GetPrev(
const ItemType* item) {
return ItemTypeTraits::GetPrev(item); }
6065 static ItemType* GetNext(
const ItemType* item) {
return ItemTypeTraits::GetNext(item); }
6067 VmaIntrusiveLinkedList() { }
6068 VmaIntrusiveLinkedList(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
6069 VmaIntrusiveLinkedList(VmaIntrusiveLinkedList<ItemTypeTraits>&& src) :
6070 m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count)
6072 src.m_Front = src.m_Back = VMA_NULL;
6075 ~VmaIntrusiveLinkedList()
6077 VMA_HEAVY_ASSERT(IsEmpty());
6079 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
6080 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(VmaIntrusiveLinkedList<ItemTypeTraits>&& src)
6084 VMA_HEAVY_ASSERT(IsEmpty());
6085 m_Front = src.m_Front;
6086 m_Back = src.m_Back;
6087 m_Count = src.m_Count;
6088 src.m_Front = src.m_Back = VMA_NULL;
6097 ItemType* item = m_Back;
6098 while(item != VMA_NULL)
6100 ItemType*
const prevItem = ItemTypeTraits::AccessPrev(item);
6101 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
6102 ItemTypeTraits::AccessNext(item) = VMA_NULL;
6110 size_t GetCount()
const {
return m_Count; }
6111 bool IsEmpty()
const {
return m_Count == 0; }
6112 ItemType* Front() {
return m_Front; }
6113 const ItemType* Front()
const {
return m_Front; }
6114 ItemType* Back() {
return m_Back; }
6115 const ItemType* Back()
const {
return m_Back; }
6116 void PushBack(ItemType* item)
6118 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
6127 ItemTypeTraits::AccessPrev(item) = m_Back;
6128 ItemTypeTraits::AccessNext(m_Back) = item;
6133 void PushFront(ItemType* item)
6135 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
6144 ItemTypeTraits::AccessNext(item) = m_Front;
6145 ItemTypeTraits::AccessPrev(m_Front) = item;
6152 VMA_HEAVY_ASSERT(m_Count > 0);
6153 ItemType*
const backItem = m_Back;
6154 ItemType*
const prevItem = ItemTypeTraits::GetPrev(backItem);
6155 if(prevItem != VMA_NULL)
6157 ItemTypeTraits::AccessNext(prevItem) = VMA_NULL;
6161 ItemTypeTraits::AccessPrev(backItem) = VMA_NULL;
6162 ItemTypeTraits::AccessNext(backItem) = VMA_NULL;
6165 ItemType* PopFront()
6167 VMA_HEAVY_ASSERT(m_Count > 0);
6168 ItemType*
const frontItem = m_Front;
6169 ItemType*
const nextItem = ItemTypeTraits::GetNext(frontItem);
6170 if(nextItem != VMA_NULL)
6172 ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL;
6176 ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL;
6177 ItemTypeTraits::AccessNext(frontItem) = VMA_NULL;
6182 void InsertBefore(ItemType* existingItem, ItemType* newItem)
6184 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
6185 if(existingItem != VMA_NULL)
6187 ItemType*
const prevItem = ItemTypeTraits::GetPrev(existingItem);
6188 ItemTypeTraits::AccessPrev(newItem) = prevItem;
6189 ItemTypeTraits::AccessNext(newItem) = existingItem;
6190 ItemTypeTraits::AccessPrev(existingItem) = newItem;
6191 if(prevItem != VMA_NULL)
6193 ItemTypeTraits::AccessNext(prevItem) = newItem;
6197 VMA_HEAVY_ASSERT(m_Front == existingItem);
6206 void InsertAfter(ItemType* existingItem, ItemType* newItem)
6208 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
6209 if(existingItem != VMA_NULL)
6211 ItemType*
const nextItem = ItemTypeTraits::GetNext(existingItem);
6212 ItemTypeTraits::AccessNext(newItem) = nextItem;
6213 ItemTypeTraits::AccessPrev(newItem) = existingItem;
6214 ItemTypeTraits::AccessNext(existingItem) = newItem;
6215 if(nextItem != VMA_NULL)
6217 ItemTypeTraits::AccessPrev(nextItem) = newItem;
6221 VMA_HEAVY_ASSERT(m_Back == existingItem);
6227 return PushFront(newItem);
6229 void Remove(ItemType* item)
6231 VMA_HEAVY_ASSERT(item != VMA_NULL && m_Count > 0);
6232 if(ItemTypeTraits::GetPrev(item) != VMA_NULL)
6234 ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item);
6238 VMA_HEAVY_ASSERT(m_Front == item);
6239 m_Front = ItemTypeTraits::GetNext(item);
6242 if(ItemTypeTraits::GetNext(item) != VMA_NULL)
6244 ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item);
6248 VMA_HEAVY_ASSERT(m_Back == item);
6249 m_Back = ItemTypeTraits::GetPrev(item);
6251 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
6252 ItemTypeTraits::AccessNext(item) = VMA_NULL;
6256 ItemType* m_Front = VMA_NULL;
6257 ItemType* m_Back = VMA_NULL;
6267 #if VMA_USE_STL_UNORDERED_MAP
6269 #define VmaPair std::pair
6271 #define VMA_MAP_TYPE(KeyT, ValueT) \
6272 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
6276 template<
typename T1,
typename T2>
6282 VmaPair() : first(), second() { }
6283 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
6289 template<
typename KeyT,
typename ValueT>
6293 typedef VmaPair<KeyT, ValueT> PairType;
6294 typedef PairType* iterator;
6296 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
6298 iterator begin() {
return m_Vector.begin(); }
6299 iterator end() {
return m_Vector.end(); }
6301 void insert(
const PairType& pair);
6302 iterator find(
const KeyT& key);
6303 void erase(iterator it);
6306 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
6309 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
6311 template<
typename FirstT,
typename SecondT>
6312 struct VmaPairFirstLess
6314 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const
6316 return lhs.first < rhs.first;
6318 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const
6320 return lhs.first < rhsFirst;
6324 template<
typename KeyT,
typename ValueT>
6325 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
6327 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
6329 m_Vector.data() + m_Vector.size(),
6331 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
6332 VmaVectorInsert(m_Vector, indexToInsert, pair);
6335 template<
typename KeyT,
typename ValueT>
6336 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
6338 PairType* it = VmaBinaryFindFirstNotLess(
6340 m_Vector.data() + m_Vector.size(),
6342 VmaPairFirstLess<KeyT, ValueT>());
6343 if((it != m_Vector.end()) && (it->first == key))
6349 return m_Vector.end();
6353 template<
typename KeyT,
typename ValueT>
6354 void VmaMap<KeyT, ValueT>::erase(iterator it)
6356 VmaVectorRemove(m_Vector, it - m_Vector.begin());
6365 class VmaDeviceMemoryBlock;
6367 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
6369 struct VmaAllocation_T
6372 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
6376 FLAG_USER_DATA_STRING = 0x01,
6380 enum ALLOCATION_TYPE
6382 ALLOCATION_TYPE_NONE,
6383 ALLOCATION_TYPE_BLOCK,
6384 ALLOCATION_TYPE_DEDICATED,
6391 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
6394 m_pUserData{VMA_NULL},
6395 m_LastUseFrameIndex{currentFrameIndex},
6396 m_MemoryTypeIndex{0},
6397 m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
6398 m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
6400 m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
6402 #if VMA_STATS_STRING_ENABLED
6403 m_CreationFrameIndex = currentFrameIndex;
6404 m_BufferImageUsage = 0;
6410 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
6413 VMA_ASSERT(m_pUserData == VMA_NULL);
6416 void InitBlockAllocation(
6417 VmaDeviceMemoryBlock* block,
6418 VkDeviceSize offset,
6419 VkDeviceSize alignment,
6421 uint32_t memoryTypeIndex,
6422 VmaSuballocationType suballocationType,
6426 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6427 VMA_ASSERT(block != VMA_NULL);
6428 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6429 m_Alignment = alignment;
6431 m_MemoryTypeIndex = memoryTypeIndex;
6432 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6433 m_SuballocationType = (uint8_t)suballocationType;
6434 m_BlockAllocation.m_Block = block;
6435 m_BlockAllocation.m_Offset = offset;
6436 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
6441 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6442 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
6443 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6444 m_MemoryTypeIndex = 0;
6445 m_BlockAllocation.m_Block = VMA_NULL;
6446 m_BlockAllocation.m_Offset = 0;
6447 m_BlockAllocation.m_CanBecomeLost =
true;
6450 void ChangeBlockAllocation(
6452 VmaDeviceMemoryBlock* block,
6453 VkDeviceSize offset);
6455 void ChangeOffset(VkDeviceSize newOffset);
6458 void InitDedicatedAllocation(
6459 uint32_t memoryTypeIndex,
6460 VkDeviceMemory hMemory,
6461 VmaSuballocationType suballocationType,
6465 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6466 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
6467 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
6470 m_MemoryTypeIndex = memoryTypeIndex;
6471 m_SuballocationType = (uint8_t)suballocationType;
6472 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6473 m_DedicatedAllocation.m_hMemory = hMemory;
6474 m_DedicatedAllocation.m_pMappedData = pMappedData;
6475 m_DedicatedAllocation.m_Prev = VMA_NULL;
6476 m_DedicatedAllocation.m_Next = VMA_NULL;
6479 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
6480 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
6481 VkDeviceSize GetSize()
const {
return m_Size; }
6482 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
6483 void* GetUserData()
const {
return m_pUserData; }
6484 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
6485 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
6487 VmaDeviceMemoryBlock* GetBlock()
const
6489 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6490 return m_BlockAllocation.m_Block;
6492 VkDeviceSize GetOffset()
const;
6493 VkDeviceMemory GetMemory()
const;
6494 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
6495 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
6496 void* GetMappedData()
const;
6497 bool CanBecomeLost()
const;
6499 uint32_t GetLastUseFrameIndex()
const
6501 return m_LastUseFrameIndex.load();
6503 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
6505 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
6515 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6517 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
6519 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
6530 void BlockAllocMap();
6531 void BlockAllocUnmap();
6532 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
6535 #if VMA_STATS_STRING_ENABLED
6536 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
6537 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
6539 void InitBufferImageUsage(uint32_t bufferImageUsage)
6541 VMA_ASSERT(m_BufferImageUsage == 0);
6542 m_BufferImageUsage = bufferImageUsage;
6545 void PrintParameters(
class VmaJsonWriter& json)
const;
6549 VkDeviceSize m_Alignment;
6550 VkDeviceSize m_Size;
6552 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
6553 uint32_t m_MemoryTypeIndex;
6555 uint8_t m_SuballocationType;
6562 struct BlockAllocation
6564 VmaDeviceMemoryBlock* m_Block;
6565 VkDeviceSize m_Offset;
6566 bool m_CanBecomeLost;
6570 struct DedicatedAllocation
6572 VkDeviceMemory m_hMemory;
6573 void* m_pMappedData;
6574 VmaAllocation_T* m_Prev;
6575 VmaAllocation_T* m_Next;
6581 BlockAllocation m_BlockAllocation;
6583 DedicatedAllocation m_DedicatedAllocation;
6586 #if VMA_STATS_STRING_ENABLED
6587 uint32_t m_CreationFrameIndex;
6588 uint32_t m_BufferImageUsage;
6593 friend struct VmaDedicatedAllocationListItemTraits;
6596 struct VmaDedicatedAllocationListItemTraits
6598 typedef VmaAllocation_T ItemType;
6599 static ItemType* GetPrev(
const ItemType* item)
6601 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6602 return item->m_DedicatedAllocation.m_Prev;
6604 static ItemType* GetNext(
const ItemType* item)
6606 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6607 return item->m_DedicatedAllocation.m_Next;
6609 static ItemType*& AccessPrev(ItemType* item)
6611 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6612 return item->m_DedicatedAllocation.m_Prev;
6614 static ItemType*& AccessNext(ItemType* item){
6615 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6616 return item->m_DedicatedAllocation.m_Next;
6624 struct VmaSuballocation
6626 VkDeviceSize offset;
6629 VmaSuballocationType type;
6633 struct VmaSuballocationOffsetLess
6635 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6637 return lhs.offset < rhs.offset;
6640 struct VmaSuballocationOffsetGreater
6642 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6644 return lhs.offset > rhs.offset;
6648 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
6651 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
6653 enum class VmaAllocationRequestType
6675 struct VmaAllocationRequest
6677 VkDeviceSize offset;
6678 VkDeviceSize sumFreeSize;
6679 VkDeviceSize sumItemSize;
6680 VmaSuballocationList::iterator item;
6681 size_t itemsToMakeLostCount;
6683 VmaAllocationRequestType type;
6685 VkDeviceSize CalcCost()
const
6687 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
6695 class VmaBlockMetadata
6699 virtual ~VmaBlockMetadata() { }
6700 virtual void Init(VkDeviceSize size) { m_Size = size; }
6703 virtual bool Validate()
const = 0;
6704 VkDeviceSize GetSize()
const {
return m_Size; }
6705 virtual size_t GetAllocationCount()
const = 0;
6706 virtual VkDeviceSize GetSumFreeSize()
const = 0;
6707 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
6709 virtual bool IsEmpty()
const = 0;
6711 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
6713 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
6715 #if VMA_STATS_STRING_ENABLED
6716 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
6722 virtual bool CreateAllocationRequest(
6723 uint32_t currentFrameIndex,
6724 uint32_t frameInUseCount,
6725 VkDeviceSize bufferImageGranularity,
6726 VkDeviceSize allocSize,
6727 VkDeviceSize allocAlignment,
6729 VmaSuballocationType allocType,
6730 bool canMakeOtherLost,
6733 VmaAllocationRequest* pAllocationRequest) = 0;
6735 virtual bool MakeRequestedAllocationsLost(
6736 uint32_t currentFrameIndex,
6737 uint32_t frameInUseCount,
6738 VmaAllocationRequest* pAllocationRequest) = 0;
6740 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
6742 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
6746 const VmaAllocationRequest& request,
6747 VmaSuballocationType type,
6748 VkDeviceSize allocSize,
6753 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
6756 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
6758 #if VMA_STATS_STRING_ENABLED
6759 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6760 VkDeviceSize unusedBytes,
6761 size_t allocationCount,
6762 size_t unusedRangeCount)
const;
6763 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6764 VkDeviceSize offset,
6766 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6767 VkDeviceSize offset,
6768 VkDeviceSize size)
const;
6769 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
6773 VkDeviceSize m_Size;
6774 const VkAllocationCallbacks* m_pAllocationCallbacks;
6777 #define VMA_VALIDATE(cond) do { if(!(cond)) { \
6778 VMA_ASSERT(0 && "Validation failed: " #cond); \
6782 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
6784 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
6787 virtual ~VmaBlockMetadata_Generic();
6788 virtual void Init(VkDeviceSize size);
6790 virtual bool Validate()
const;
6791 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
6792 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
6793 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6794 virtual bool IsEmpty()
const;
6796 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6797 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6799 #if VMA_STATS_STRING_ENABLED
6800 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
6803 virtual bool CreateAllocationRequest(
6804 uint32_t currentFrameIndex,
6805 uint32_t frameInUseCount,
6806 VkDeviceSize bufferImageGranularity,
6807 VkDeviceSize allocSize,
6808 VkDeviceSize allocAlignment,
6810 VmaSuballocationType allocType,
6811 bool canMakeOtherLost,
6813 VmaAllocationRequest* pAllocationRequest);
6815 virtual bool MakeRequestedAllocationsLost(
6816 uint32_t currentFrameIndex,
6817 uint32_t frameInUseCount,
6818 VmaAllocationRequest* pAllocationRequest);
6820 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6822 virtual VkResult CheckCorruption(
const void* pBlockData);
6825 const VmaAllocationRequest& request,
6826 VmaSuballocationType type,
6827 VkDeviceSize allocSize,
6831 virtual void FreeAtOffset(VkDeviceSize offset);
6836 bool IsBufferImageGranularityConflictPossible(
6837 VkDeviceSize bufferImageGranularity,
6838 VmaSuballocationType& inOutPrevSuballocType)
const;
6841 friend class VmaDefragmentationAlgorithm_Generic;
6842 friend class VmaDefragmentationAlgorithm_Fast;
6844 uint32_t m_FreeCount;
6845 VkDeviceSize m_SumFreeSize;
6846 VmaSuballocationList m_Suballocations;
6849 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
6851 bool ValidateFreeSuballocationList()
const;
6855 bool CheckAllocation(
6856 uint32_t currentFrameIndex,
6857 uint32_t frameInUseCount,
6858 VkDeviceSize bufferImageGranularity,
6859 VkDeviceSize allocSize,
6860 VkDeviceSize allocAlignment,
6861 VmaSuballocationType allocType,
6862 VmaSuballocationList::const_iterator suballocItem,
6863 bool canMakeOtherLost,
6864 VkDeviceSize* pOffset,
6865 size_t* itemsToMakeLostCount,
6866 VkDeviceSize* pSumFreeSize,
6867 VkDeviceSize* pSumItemSize)
const;
6869 void MergeFreeWithNext(VmaSuballocationList::iterator item);
6873 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
6876 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
6879 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
6960 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
6962 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
6965 virtual ~VmaBlockMetadata_Linear();
6966 virtual void Init(VkDeviceSize size);
6968 virtual bool Validate()
const;
6969 virtual size_t GetAllocationCount()
const;
6970 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
6971 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6972 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
6974 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6975 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6977 #if VMA_STATS_STRING_ENABLED
6978 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
6981 virtual bool CreateAllocationRequest(
6982 uint32_t currentFrameIndex,
6983 uint32_t frameInUseCount,
6984 VkDeviceSize bufferImageGranularity,
6985 VkDeviceSize allocSize,
6986 VkDeviceSize allocAlignment,
6988 VmaSuballocationType allocType,
6989 bool canMakeOtherLost,
6991 VmaAllocationRequest* pAllocationRequest);
6993 virtual bool MakeRequestedAllocationsLost(
6994 uint32_t currentFrameIndex,
6995 uint32_t frameInUseCount,
6996 VmaAllocationRequest* pAllocationRequest);
6998 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
7000 virtual VkResult CheckCorruption(
const void* pBlockData);
7003 const VmaAllocationRequest& request,
7004 VmaSuballocationType type,
7005 VkDeviceSize allocSize,
7009 virtual void FreeAtOffset(VkDeviceSize offset);
7019 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
7021 enum SECOND_VECTOR_MODE
7023 SECOND_VECTOR_EMPTY,
7028 SECOND_VECTOR_RING_BUFFER,
7034 SECOND_VECTOR_DOUBLE_STACK,
7037 VkDeviceSize m_SumFreeSize;
7038 SuballocationVectorType m_Suballocations0, m_Suballocations1;
7039 uint32_t m_1stVectorIndex;
7040 SECOND_VECTOR_MODE m_2ndVectorMode;
7042 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7043 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
7044 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7045 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
7048 size_t m_1stNullItemsBeginCount;
7050 size_t m_1stNullItemsMiddleCount;
7052 size_t m_2ndNullItemsCount;
7054 bool ShouldCompact1st()
const;
7055 void CleanupAfterFree();
7057 bool CreateAllocationRequest_LowerAddress(
7058 uint32_t currentFrameIndex,
7059 uint32_t frameInUseCount,
7060 VkDeviceSize bufferImageGranularity,
7061 VkDeviceSize allocSize,
7062 VkDeviceSize allocAlignment,
7063 VmaSuballocationType allocType,
7064 bool canMakeOtherLost,
7066 VmaAllocationRequest* pAllocationRequest);
7067 bool CreateAllocationRequest_UpperAddress(
7068 uint32_t currentFrameIndex,
7069 uint32_t frameInUseCount,
7070 VkDeviceSize bufferImageGranularity,
7071 VkDeviceSize allocSize,
7072 VkDeviceSize allocAlignment,
7073 VmaSuballocationType allocType,
7074 bool canMakeOtherLost,
7076 VmaAllocationRequest* pAllocationRequest);
7090 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
7092 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
7095 virtual ~VmaBlockMetadata_Buddy();
7096 virtual void Init(VkDeviceSize size);
7098 virtual bool Validate()
const;
7099 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
7100 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
7101 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
7102 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
7104 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
7105 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
7107 #if VMA_STATS_STRING_ENABLED
7108 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
7111 virtual bool CreateAllocationRequest(
7112 uint32_t currentFrameIndex,
7113 uint32_t frameInUseCount,
7114 VkDeviceSize bufferImageGranularity,
7115 VkDeviceSize allocSize,
7116 VkDeviceSize allocAlignment,
7118 VmaSuballocationType allocType,
7119 bool canMakeOtherLost,
7121 VmaAllocationRequest* pAllocationRequest);
7123 virtual bool MakeRequestedAllocationsLost(
7124 uint32_t currentFrameIndex,
7125 uint32_t frameInUseCount,
7126 VmaAllocationRequest* pAllocationRequest);
7128 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
7130 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
7133 const VmaAllocationRequest& request,
7134 VmaSuballocationType type,
7135 VkDeviceSize allocSize,
7138 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
7139 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
7142 static const VkDeviceSize MIN_NODE_SIZE = 32;
7143 static const size_t MAX_LEVELS = 30;
7145 struct ValidationContext
7147 size_t calculatedAllocationCount;
7148 size_t calculatedFreeCount;
7149 VkDeviceSize calculatedSumFreeSize;
7151 ValidationContext() :
7152 calculatedAllocationCount(0),
7153 calculatedFreeCount(0),
7154 calculatedSumFreeSize(0) { }
7159 VkDeviceSize offset;
7189 VkDeviceSize m_UsableSize;
7190 uint32_t m_LevelCount;
7196 } m_FreeList[MAX_LEVELS];
7198 size_t m_AllocationCount;
7202 VkDeviceSize m_SumFreeSize;
7204 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
7205 void DeleteNode(Node* node);
7206 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
7207 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
7208 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
7210 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
7211 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
7215 void AddToFreeListFront(uint32_t level, Node* node);
7219 void RemoveFromFreeList(uint32_t level, Node* node);
7221 #if VMA_STATS_STRING_ENABLED
7222 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
7232 class VmaDeviceMemoryBlock
7234 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
7236 VmaBlockMetadata* m_pMetadata;
7240 ~VmaDeviceMemoryBlock()
7242 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
7243 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
7250 uint32_t newMemoryTypeIndex,
7251 VkDeviceMemory newMemory,
7252 VkDeviceSize newSize,
7254 uint32_t algorithm);
7258 VmaPool GetParentPool()
const {
return m_hParentPool; }
7259 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
7260 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7261 uint32_t GetId()
const {
return m_Id; }
7262 void* GetMappedData()
const {
return m_pMappedData; }
7265 bool Validate()
const;
7270 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
7273 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
7274 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
7276 VkResult BindBufferMemory(
7279 VkDeviceSize allocationLocalOffset,
7282 VkResult BindImageMemory(
7285 VkDeviceSize allocationLocalOffset,
7291 uint32_t m_MemoryTypeIndex;
7293 VkDeviceMemory m_hMemory;
7301 uint32_t m_MapCount;
7302 void* m_pMappedData;
7305 struct VmaDefragmentationMove
7307 size_t srcBlockIndex;
7308 size_t dstBlockIndex;
7309 VkDeviceSize srcOffset;
7310 VkDeviceSize dstOffset;
7313 VmaDeviceMemoryBlock* pSrcBlock;
7314 VmaDeviceMemoryBlock* pDstBlock;
7317 class VmaDefragmentationAlgorithm;
7325 struct VmaBlockVector
7327 VMA_CLASS_NO_COPY(VmaBlockVector)
7332 uint32_t memoryTypeIndex,
7333 VkDeviceSize preferredBlockSize,
7334 size_t minBlockCount,
7335 size_t maxBlockCount,
7336 VkDeviceSize bufferImageGranularity,
7337 uint32_t frameInUseCount,
7338 bool explicitBlockSize,
7341 VkDeviceSize minAllocationAlignment);
7344 VkResult CreateMinBlocks();
7346 VmaAllocator GetAllocator()
const {
return m_hAllocator; }
7347 VmaPool GetParentPool()
const {
return m_hParentPool; }
7348 bool IsCustomPool()
const {
return m_hParentPool != VMA_NULL; }
7349 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7350 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
7351 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
7352 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
7353 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
7358 bool IsCorruptionDetectionEnabled()
const;
7361 uint32_t currentFrameIndex,
7363 VkDeviceSize alignment,
7365 VmaSuballocationType suballocType,
7366 size_t allocationCount,
7374 #if VMA_STATS_STRING_ENABLED
7375 void PrintDetailedMap(
class VmaJsonWriter& json);
7378 void MakePoolAllocationsLost(
7379 uint32_t currentFrameIndex,
7380 size_t* pLostAllocationCount);
7381 VkResult CheckCorruption();
7385 class VmaBlockVectorDefragmentationContext* pCtx,
7387 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
7388 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
7389 VkCommandBuffer commandBuffer);
7390 void DefragmentationEnd(
7391 class VmaBlockVectorDefragmentationContext* pCtx,
7395 uint32_t ProcessDefragmentations(
7396 class VmaBlockVectorDefragmentationContext *pCtx,
7399 void CommitDefragmentations(
7400 class VmaBlockVectorDefragmentationContext *pCtx,
7406 size_t GetBlockCount()
const {
return m_Blocks.size(); }
7407 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
7408 size_t CalcAllocationCount()
const;
7409 bool IsBufferImageGranularityConflictPossible()
const;
7412 friend class VmaDefragmentationAlgorithm_Generic;
7416 const uint32_t m_MemoryTypeIndex;
7417 const VkDeviceSize m_PreferredBlockSize;
7418 const size_t m_MinBlockCount;
7419 const size_t m_MaxBlockCount;
7420 const VkDeviceSize m_BufferImageGranularity;
7421 const uint32_t m_FrameInUseCount;
7422 const bool m_ExplicitBlockSize;
7423 const uint32_t m_Algorithm;
7424 const float m_Priority;
7425 const VkDeviceSize m_MinAllocationAlignment;
7426 VMA_RW_MUTEX m_Mutex;
7430 bool m_HasEmptyBlock;
7432 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
7433 uint32_t m_NextBlockId;
7435 VkDeviceSize CalcMaxBlockSize()
const;
7438 void Remove(VmaDeviceMemoryBlock* pBlock);
7442 void IncrementallySortBlocks();
7444 VkResult AllocatePage(
7445 uint32_t currentFrameIndex,
7447 VkDeviceSize alignment,
7449 VmaSuballocationType suballocType,
7453 VkResult AllocateFromBlock(
7454 VmaDeviceMemoryBlock* pBlock,
7455 uint32_t currentFrameIndex,
7457 VkDeviceSize alignment,
7460 VmaSuballocationType suballocType,
7464 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
7467 void ApplyDefragmentationMovesCpu(
7468 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7469 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
7471 void ApplyDefragmentationMovesGpu(
7472 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7473 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7474 VkCommandBuffer commandBuffer);
7482 void UpdateHasEmptyBlock();
7487 VMA_CLASS_NO_COPY(VmaPool_T)
7489 VmaBlockVector m_BlockVector;
7494 VkDeviceSize preferredBlockSize);
7497 uint32_t GetId()
const {
return m_Id; }
7498 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
7500 const char* GetName()
const {
return m_Name; }
7501 void SetName(
const char* pName);
7503 #if VMA_STATS_STRING_ENABLED
7510 VmaPool_T* m_PrevPool = VMA_NULL;
7511 VmaPool_T* m_NextPool = VMA_NULL;
7512 friend struct VmaPoolListItemTraits;
7515 struct VmaPoolListItemTraits
7517 typedef VmaPool_T ItemType;
7518 static ItemType* GetPrev(
const ItemType* item) {
return item->m_PrevPool; }
7519 static ItemType* GetNext(
const ItemType* item) {
return item->m_NextPool; }
7520 static ItemType*& AccessPrev(ItemType* item) {
return item->m_PrevPool; }
7521 static ItemType*& AccessNext(ItemType* item) {
return item->m_NextPool; }
7531 class VmaDefragmentationAlgorithm
7533 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
7535 VmaDefragmentationAlgorithm(
7537 VmaBlockVector* pBlockVector,
7538 uint32_t currentFrameIndex) :
7539 m_hAllocator(hAllocator),
7540 m_pBlockVector(pBlockVector),
7541 m_CurrentFrameIndex(currentFrameIndex)
7544 virtual ~VmaDefragmentationAlgorithm()
7548 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
7549 virtual void AddAll() = 0;
7551 virtual VkResult Defragment(
7552 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7553 VkDeviceSize maxBytesToMove,
7554 uint32_t maxAllocationsToMove,
7557 virtual VkDeviceSize GetBytesMoved()
const = 0;
7558 virtual uint32_t GetAllocationsMoved()
const = 0;
7562 VmaBlockVector*
const m_pBlockVector;
7563 const uint32_t m_CurrentFrameIndex;
7565 struct AllocationInfo
7568 VkBool32* m_pChanged;
7571 m_hAllocation(VK_NULL_HANDLE),
7572 m_pChanged(VMA_NULL)
7576 m_hAllocation(hAlloc),
7577 m_pChanged(pChanged)
7583 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
7585 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
7587 VmaDefragmentationAlgorithm_Generic(
7589 VmaBlockVector* pBlockVector,
7590 uint32_t currentFrameIndex,
7591 bool overlappingMoveSupported);
7592 virtual ~VmaDefragmentationAlgorithm_Generic();
7594 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7595 virtual void AddAll() { m_AllAllocations =
true; }
7597 virtual VkResult Defragment(
7598 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7599 VkDeviceSize maxBytesToMove,
7600 uint32_t maxAllocationsToMove,
7603 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7604 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7607 uint32_t m_AllocationCount;
7608 bool m_AllAllocations;
7610 VkDeviceSize m_BytesMoved;
7611 uint32_t m_AllocationsMoved;
7613 struct AllocationInfoSizeGreater
7615 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7617 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
7621 struct AllocationInfoOffsetGreater
7623 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7625 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
7631 size_t m_OriginalBlockIndex;
7632 VmaDeviceMemoryBlock* m_pBlock;
7633 bool m_HasNonMovableAllocations;
7634 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
7636 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
7637 m_OriginalBlockIndex(SIZE_MAX),
7639 m_HasNonMovableAllocations(true),
7640 m_Allocations(pAllocationCallbacks)
7644 void CalcHasNonMovableAllocations()
7646 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
7647 const size_t defragmentAllocCount = m_Allocations.size();
7648 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
7651 void SortAllocationsBySizeDescending()
7653 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
7656 void SortAllocationsByOffsetDescending()
7658 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
7662 struct BlockPointerLess
7664 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const
7666 return pLhsBlockInfo->m_pBlock < pRhsBlock;
7668 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7670 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
7676 struct BlockInfoCompareMoveDestination
7678 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7680 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
7684 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
7688 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
7696 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
7697 BlockInfoVector m_Blocks;
7699 VkResult DefragmentRound(
7700 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7701 VkDeviceSize maxBytesToMove,
7702 uint32_t maxAllocationsToMove,
7703 bool freeOldAllocations);
7705 size_t CalcBlocksWithNonMovableCount()
const;
7707 static bool MoveMakesSense(
7708 size_t dstBlockIndex, VkDeviceSize dstOffset,
7709 size_t srcBlockIndex, VkDeviceSize srcOffset);
7712 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
7714 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
7716 VmaDefragmentationAlgorithm_Fast(
7718 VmaBlockVector* pBlockVector,
7719 uint32_t currentFrameIndex,
7720 bool overlappingMoveSupported);
7721 virtual ~VmaDefragmentationAlgorithm_Fast();
7723 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
7724 virtual void AddAll() { m_AllAllocations =
true; }
7726 virtual VkResult Defragment(
7727 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7728 VkDeviceSize maxBytesToMove,
7729 uint32_t maxAllocationsToMove,
7732 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7733 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7738 size_t origBlockIndex;
7741 class FreeSpaceDatabase
7747 s.blockInfoIndex = SIZE_MAX;
7748 for(
size_t i = 0; i < MAX_COUNT; ++i)
7750 m_FreeSpaces[i] = s;
7754 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
7756 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7762 size_t bestIndex = SIZE_MAX;
7763 for(
size_t i = 0; i < MAX_COUNT; ++i)
7766 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
7771 if(m_FreeSpaces[i].size < size &&
7772 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
7778 if(bestIndex != SIZE_MAX)
7780 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
7781 m_FreeSpaces[bestIndex].offset = offset;
7782 m_FreeSpaces[bestIndex].size = size;
7786 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
7787 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
7789 size_t bestIndex = SIZE_MAX;
7790 VkDeviceSize bestFreeSpaceAfter = 0;
7791 for(
size_t i = 0; i < MAX_COUNT; ++i)
7794 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
7796 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
7798 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
7800 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
7802 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
7805 bestFreeSpaceAfter = freeSpaceAfter;
7811 if(bestIndex != SIZE_MAX)
7813 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
7814 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
7816 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7819 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
7820 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
7821 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
7826 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
7836 static const size_t MAX_COUNT = 4;
7840 size_t blockInfoIndex;
7841 VkDeviceSize offset;
7843 } m_FreeSpaces[MAX_COUNT];
7846 const bool m_OverlappingMoveSupported;
7848 uint32_t m_AllocationCount;
7849 bool m_AllAllocations;
7851 VkDeviceSize m_BytesMoved;
7852 uint32_t m_AllocationsMoved;
7854 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
7856 void PreprocessMetadata();
7857 void PostprocessMetadata();
7858 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
7861 struct VmaBlockDefragmentationContext
7865 BLOCK_FLAG_USED = 0x00000001,
7871 class VmaBlockVectorDefragmentationContext
7873 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
7877 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
7878 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
7879 uint32_t defragmentationMovesProcessed;
7880 uint32_t defragmentationMovesCommitted;
7881 bool hasDefragmentationPlan;
7883 VmaBlockVectorDefragmentationContext(
7886 VmaBlockVector* pBlockVector,
7887 uint32_t currFrameIndex);
7888 ~VmaBlockVectorDefragmentationContext();
7890 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
7891 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
7892 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
7894 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7895 void AddAll() { m_AllAllocations =
true; }
7904 VmaBlockVector*
const m_pBlockVector;
7905 const uint32_t m_CurrFrameIndex;
7907 VmaDefragmentationAlgorithm* m_pAlgorithm;
7915 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
7916 bool m_AllAllocations;
7919 struct VmaDefragmentationContext_T
7922 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
7924 VmaDefragmentationContext_T(
7926 uint32_t currFrameIndex,
7929 ~VmaDefragmentationContext_T();
7931 void AddPools(uint32_t poolCount,
const VmaPool* pPools);
7932 void AddAllocations(
7933 uint32_t allocationCount,
7935 VkBool32* pAllocationsChanged);
7943 VkResult Defragment(
7944 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
7945 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
7949 VkResult DefragmentPassEnd();
7953 const uint32_t m_CurrFrameIndex;
7954 const uint32_t m_Flags;
7957 VkDeviceSize m_MaxCpuBytesToMove;
7958 uint32_t m_MaxCpuAllocationsToMove;
7959 VkDeviceSize m_MaxGpuBytesToMove;
7960 uint32_t m_MaxGpuAllocationsToMove;
7963 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
7965 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
7968 #if VMA_RECORDING_ENABLED
7975 void WriteConfiguration(
7976 const VkPhysicalDeviceProperties& devProps,
7977 const VkPhysicalDeviceMemoryProperties& memProps,
7978 uint32_t vulkanApiVersion,
7979 bool dedicatedAllocationExtensionEnabled,
7980 bool bindMemory2ExtensionEnabled,
7981 bool memoryBudgetExtensionEnabled,
7982 bool deviceCoherentMemoryExtensionEnabled);
7985 void RecordCreateAllocator(uint32_t frameIndex);
7986 void RecordDestroyAllocator(uint32_t frameIndex);
7987 void RecordCreatePool(uint32_t frameIndex,
7990 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
7991 void RecordAllocateMemory(uint32_t frameIndex,
7992 const VkMemoryRequirements& vkMemReq,
7995 void RecordAllocateMemoryPages(uint32_t frameIndex,
7996 const VkMemoryRequirements& vkMemReq,
7998 uint64_t allocationCount,
8000 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
8001 const VkMemoryRequirements& vkMemReq,
8002 bool requiresDedicatedAllocation,
8003 bool prefersDedicatedAllocation,
8006 void RecordAllocateMemoryForImage(uint32_t frameIndex,
8007 const VkMemoryRequirements& vkMemReq,
8008 bool requiresDedicatedAllocation,
8009 bool prefersDedicatedAllocation,
8012 void RecordFreeMemory(uint32_t frameIndex,
8014 void RecordFreeMemoryPages(uint32_t frameIndex,
8015 uint64_t allocationCount,
8017 void RecordSetAllocationUserData(uint32_t frameIndex,
8019 const void* pUserData);
8020 void RecordCreateLostAllocation(uint32_t frameIndex,
8022 void RecordMapMemory(uint32_t frameIndex,
8024 void RecordUnmapMemory(uint32_t frameIndex,
8026 void RecordFlushAllocation(uint32_t frameIndex,
8027 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
8028 void RecordInvalidateAllocation(uint32_t frameIndex,
8029 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
8030 void RecordCreateBuffer(uint32_t frameIndex,
8031 const VkBufferCreateInfo& bufCreateInfo,
8034 void RecordCreateImage(uint32_t frameIndex,
8035 const VkImageCreateInfo& imageCreateInfo,
8038 void RecordDestroyBuffer(uint32_t frameIndex,
8040 void RecordDestroyImage(uint32_t frameIndex,
8042 void RecordTouchAllocation(uint32_t frameIndex,
8044 void RecordGetAllocationInfo(uint32_t frameIndex,
8046 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
8048 void RecordDefragmentationBegin(uint32_t frameIndex,
8051 void RecordDefragmentationEnd(uint32_t frameIndex,
8053 void RecordSetPoolName(uint32_t frameIndex,
8064 class UserDataString
8068 const char* GetString()
const {
return m_Str; }
8078 VMA_MUTEX m_FileMutex;
8079 std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;
8081 void GetBasicParams(CallParams& outParams);
8084 template<
typename T>
8085 void PrintPointerList(uint64_t count,
const T* pItems)
8089 fprintf(m_File,
"%p", pItems[0]);
8090 for(uint64_t i = 1; i < count; ++i)
8092 fprintf(m_File,
" %p", pItems[i]);
8097 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
8106 class VmaAllocationObjectAllocator
8108 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
8110 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
8112 template<
typename... Types>
VmaAllocation Allocate(Types... args);
8117 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
8120 struct VmaCurrentBudgetData
8122 VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
8123 VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
8125 #if VMA_MEMORY_BUDGET
8126 VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
8127 VMA_RW_MUTEX m_BudgetMutex;
8128 uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
8129 uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
8130 uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
8133 VmaCurrentBudgetData()
8135 for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
8137 m_BlockBytes[heapIndex] = 0;
8138 m_AllocationBytes[heapIndex] = 0;
8139 #if VMA_MEMORY_BUDGET
8140 m_VulkanUsage[heapIndex] = 0;
8141 m_VulkanBudget[heapIndex] = 0;
8142 m_BlockBytesAtBudgetFetch[heapIndex] = 0;
8146 #if VMA_MEMORY_BUDGET
8147 m_OperationsSinceBudgetFetch = 0;
8151 void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
8153 m_AllocationBytes[heapIndex] += allocationSize;
8154 #if VMA_MEMORY_BUDGET
8155 ++m_OperationsSinceBudgetFetch;
8159 void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
8161 VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
8162 m_AllocationBytes[heapIndex] -= allocationSize;
8163 #if VMA_MEMORY_BUDGET
8164 ++m_OperationsSinceBudgetFetch;
8170 struct VmaAllocator_T
8172 VMA_CLASS_NO_COPY(VmaAllocator_T)
8175 uint32_t m_VulkanApiVersion;
8176 bool m_UseKhrDedicatedAllocation;
8177 bool m_UseKhrBindMemory2;
8178 bool m_UseExtMemoryBudget;
8179 bool m_UseAmdDeviceCoherentMemory;
8180 bool m_UseKhrBufferDeviceAddress;
8181 bool m_UseExtMemoryPriority;
8183 VkInstance m_hInstance;
8184 bool m_AllocationCallbacksSpecified;
8185 VkAllocationCallbacks m_AllocationCallbacks;
8187 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
8190 uint32_t m_HeapSizeLimitMask;
8192 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
8193 VkPhysicalDeviceMemoryProperties m_MemProps;
8196 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
8198 typedef VmaIntrusiveLinkedList<VmaDedicatedAllocationListItemTraits> DedicatedAllocationLinkedList;
8199 DedicatedAllocationLinkedList m_DedicatedAllocations[VK_MAX_MEMORY_TYPES];
8200 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
8202 VmaCurrentBudgetData m_Budget;
8203 VMA_ATOMIC_UINT32 m_DeviceMemoryCount;
8209 const VkAllocationCallbacks* GetAllocationCallbacks()
const
8211 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
8215 return m_VulkanFunctions;
8218 VkPhysicalDevice GetPhysicalDevice()
const {
return m_PhysicalDevice; }
8220 VkDeviceSize GetBufferImageGranularity()
const
8223 static_cast<VkDeviceSize
>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
8224 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
8227 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
8228 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
8230 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const
8232 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
8233 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
8236 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const
8238 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
8239 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8242 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const
8244 return IsMemoryTypeNonCoherent(memTypeIndex) ?
8245 VMA_MAX((VkDeviceSize)VMA_MIN_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
8246 (VkDeviceSize)VMA_MIN_ALIGNMENT;
8249 bool IsIntegratedGpu()
const
8251 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
8254 uint32_t GetGlobalMemoryTypeBits()
const {
return m_GlobalMemoryTypeBits; }
8256 #if VMA_RECORDING_ENABLED
8257 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
8260 void GetBufferMemoryRequirements(
8262 VkMemoryRequirements& memReq,
8263 bool& requiresDedicatedAllocation,
8264 bool& prefersDedicatedAllocation)
const;
8265 void GetImageMemoryRequirements(
8267 VkMemoryRequirements& memReq,
8268 bool& requiresDedicatedAllocation,
8269 bool& prefersDedicatedAllocation)
const;
8272 VkResult AllocateMemory(
8273 const VkMemoryRequirements& vkMemReq,
8274 bool requiresDedicatedAllocation,
8275 bool prefersDedicatedAllocation,
8276 VkBuffer dedicatedBuffer,
8277 VkBufferUsageFlags dedicatedBufferUsage,
8278 VkImage dedicatedImage,
8280 VmaSuballocationType suballocType,
8281 size_t allocationCount,
8286 size_t allocationCount,
8289 void CalculateStats(
VmaStats* pStats);
8292 VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);
8294 #if VMA_STATS_STRING_ENABLED
8295 void PrintDetailedMap(
class VmaJsonWriter& json);
8298 VkResult DefragmentationBegin(
8302 VkResult DefragmentationEnd(
8305 VkResult DefragmentationPassBegin(
8308 VkResult DefragmentationPassEnd(
8315 void DestroyPool(
VmaPool pool);
8318 void SetCurrentFrameIndex(uint32_t frameIndex);
8319 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
8321 void MakePoolAllocationsLost(
8323 size_t* pLostAllocationCount);
8324 VkResult CheckPoolCorruption(
VmaPool hPool);
8325 VkResult CheckCorruption(uint32_t memoryTypeBits);
8330 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
8332 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
8334 VkResult BindVulkanBuffer(
8335 VkDeviceMemory memory,
8336 VkDeviceSize memoryOffset,
8340 VkResult BindVulkanImage(
8341 VkDeviceMemory memory,
8342 VkDeviceSize memoryOffset,
8349 VkResult BindBufferMemory(
8351 VkDeviceSize allocationLocalOffset,
8354 VkResult BindImageMemory(
8356 VkDeviceSize allocationLocalOffset,
8360 VkResult FlushOrInvalidateAllocation(
8362 VkDeviceSize offset, VkDeviceSize size,
8363 VMA_CACHE_OPERATION op);
8364 VkResult FlushOrInvalidateAllocations(
8365 uint32_t allocationCount,
8367 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
8368 VMA_CACHE_OPERATION op);
8370 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
8376 uint32_t GetGpuDefragmentationMemoryTypeBits();
8379 VkDeviceSize m_PreferredLargeHeapBlockSize;
8381 VkPhysicalDevice m_PhysicalDevice;
8382 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
8383 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
8385 VMA_RW_MUTEX m_PoolsMutex;
8386 typedef VmaIntrusiveLinkedList<VmaPoolListItemTraits> PoolList;
8389 uint32_t m_NextPoolId;
8394 uint32_t m_GlobalMemoryTypeBits;
8396 #if VMA_RECORDING_ENABLED
8397 VmaRecorder* m_pRecorder;
8402 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
8403 void ImportVulkanFunctions_Static();
8408 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
8409 void ImportVulkanFunctions_Dynamic();
8412 void ValidateVulkanFunctions();
8414 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
8416 VkResult AllocateMemoryOfType(
8418 VkDeviceSize alignment,
8419 bool dedicatedAllocation,
8420 VkBuffer dedicatedBuffer,
8421 VkBufferUsageFlags dedicatedBufferUsage,
8422 VkImage dedicatedImage,
8424 uint32_t memTypeIndex,
8425 VmaSuballocationType suballocType,
8426 size_t allocationCount,
8430 VkResult AllocateDedicatedMemoryPage(
8432 VmaSuballocationType suballocType,
8433 uint32_t memTypeIndex,
8434 const VkMemoryAllocateInfo& allocInfo,
8436 bool isUserDataString,
8441 VkResult AllocateDedicatedMemory(
8443 VmaSuballocationType suballocType,
8444 uint32_t memTypeIndex,
8447 bool isUserDataString,
8450 VkBuffer dedicatedBuffer,
8451 VkBufferUsageFlags dedicatedBufferUsage,
8452 VkImage dedicatedImage,
8453 size_t allocationCount,
8462 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
8464 uint32_t CalculateGlobalMemoryTypeBits()
const;
8466 bool GetFlushOrInvalidateRange(
8468 VkDeviceSize offset, VkDeviceSize size,
8469 VkMappedMemoryRange& outRange)
const;
8471 #if VMA_MEMORY_BUDGET
8472 void UpdateVulkanBudget();
8479 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
8481 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
8484 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
8486 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
8489 template<
typename T>
8492 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
8495 template<
typename T>
8496 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
8498 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
8501 template<
typename T>
8502 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
8507 VmaFree(hAllocator, ptr);
8511 template<
typename T>
8512 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
8516 for(
size_t i = count; i--; )
8518 VmaFree(hAllocator, ptr);
8525 #if VMA_STATS_STRING_ENABLED
8527 class VmaStringBuilder
8530 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
8531 size_t GetLength()
const {
return m_Data.size(); }
8532 const char* GetData()
const {
return m_Data.data(); }
8534 void Add(
char ch) { m_Data.push_back(ch); }
8535 void Add(
const char* pStr);
8536 void AddNewLine() { Add(
'\n'); }
8537 void AddNumber(uint32_t num);
8538 void AddNumber(uint64_t num);
8539 void AddPointer(
const void* ptr);
8542 VmaVector< char, VmaStlAllocator<char> > m_Data;
8545 void VmaStringBuilder::Add(
const char* pStr)
8547 const size_t strLen = strlen(pStr);
8550 const size_t oldCount = m_Data.size();
8551 m_Data.resize(oldCount + strLen);
8552 memcpy(m_Data.data() + oldCount, pStr, strLen);
8556 void VmaStringBuilder::AddNumber(uint32_t num)
8563 *--p =
'0' + (num % 10);
8570 void VmaStringBuilder::AddNumber(uint64_t num)
8577 *--p =
'0' + (num % 10);
8584 void VmaStringBuilder::AddPointer(
const void* ptr)
8587 VmaPtrToStr(buf,
sizeof(buf), ptr);
8596 #if VMA_STATS_STRING_ENABLED
8600 VMA_CLASS_NO_COPY(VmaJsonWriter)
8602 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
8605 void BeginObject(
bool singleLine =
false);
8608 void BeginArray(
bool singleLine =
false);
8611 void WriteString(
const char* pStr);
8612 void BeginString(
const char* pStr = VMA_NULL);
8613 void ContinueString(
const char* pStr);
8614 void ContinueString(uint32_t n);
8615 void ContinueString(uint64_t n);
8616 void ContinueString_Pointer(
const void* ptr);
8617 void EndString(
const char* pStr = VMA_NULL);
8619 void WriteNumber(uint32_t n);
8620 void WriteNumber(uint64_t n);
8621 void WriteBool(
bool b);
8625 static const char*
const INDENT;
8627 enum COLLECTION_TYPE
8629 COLLECTION_TYPE_OBJECT,
8630 COLLECTION_TYPE_ARRAY,
8634 COLLECTION_TYPE type;
8635 uint32_t valueCount;
8636 bool singleLineMode;
8639 VmaStringBuilder& m_SB;
8640 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
8641 bool m_InsideString;
8643 void BeginValue(
bool isString);
8644 void WriteIndent(
bool oneLess =
false);
8647 const char*
const VmaJsonWriter::INDENT =
" ";
8649 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
8651 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
8652 m_InsideString(false)
8656 VmaJsonWriter::~VmaJsonWriter()
8658 VMA_ASSERT(!m_InsideString);
8659 VMA_ASSERT(m_Stack.empty());
8662 void VmaJsonWriter::BeginObject(
bool singleLine)
8664 VMA_ASSERT(!m_InsideString);
8670 item.type = COLLECTION_TYPE_OBJECT;
8671 item.valueCount = 0;
8672 item.singleLineMode = singleLine;
8673 m_Stack.push_back(item);
8676 void VmaJsonWriter::EndObject()
8678 VMA_ASSERT(!m_InsideString);
8683 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
8687 void VmaJsonWriter::BeginArray(
bool singleLine)
8689 VMA_ASSERT(!m_InsideString);
8695 item.type = COLLECTION_TYPE_ARRAY;
8696 item.valueCount = 0;
8697 item.singleLineMode = singleLine;
8698 m_Stack.push_back(item);
8701 void VmaJsonWriter::EndArray()
8703 VMA_ASSERT(!m_InsideString);
8708 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
8712 void VmaJsonWriter::WriteString(
const char* pStr)
8718 void VmaJsonWriter::BeginString(
const char* pStr)
8720 VMA_ASSERT(!m_InsideString);
8724 m_InsideString =
true;
8725 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8727 ContinueString(pStr);
8731 void VmaJsonWriter::ContinueString(
const char* pStr)
8733 VMA_ASSERT(m_InsideString);
8735 const size_t strLen = strlen(pStr);
8736 for(
size_t i = 0; i < strLen; ++i)
8769 VMA_ASSERT(0 &&
"Character not currently supported.");
8775 void VmaJsonWriter::ContinueString(uint32_t n)
8777 VMA_ASSERT(m_InsideString);
8781 void VmaJsonWriter::ContinueString(uint64_t n)
8783 VMA_ASSERT(m_InsideString);
8787 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
8789 VMA_ASSERT(m_InsideString);
8790 m_SB.AddPointer(ptr);
8793 void VmaJsonWriter::EndString(
const char* pStr)
8795 VMA_ASSERT(m_InsideString);
8796 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8798 ContinueString(pStr);
8801 m_InsideString =
false;
8804 void VmaJsonWriter::WriteNumber(uint32_t n)
8806 VMA_ASSERT(!m_InsideString);
8811 void VmaJsonWriter::WriteNumber(uint64_t n)
8813 VMA_ASSERT(!m_InsideString);
8818 void VmaJsonWriter::WriteBool(
bool b)
8820 VMA_ASSERT(!m_InsideString);
8822 m_SB.Add(b ?
"true" :
"false");
8825 void VmaJsonWriter::WriteNull()
8827 VMA_ASSERT(!m_InsideString);
8832 void VmaJsonWriter::BeginValue(
bool isString)
8834 if(!m_Stack.empty())
8836 StackItem& currItem = m_Stack.back();
8837 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8838 currItem.valueCount % 2 == 0)
8840 VMA_ASSERT(isString);
8843 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8844 currItem.valueCount % 2 != 0)
8848 else if(currItem.valueCount > 0)
8857 ++currItem.valueCount;
8861 void VmaJsonWriter::WriteIndent(
bool oneLess)
8863 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
8867 size_t count = m_Stack.size();
8868 if(count > 0 && oneLess)
8872 for(
size_t i = 0; i < count; ++i)
8883 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
8885 if(IsUserDataString())
8887 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
8889 FreeUserDataString(hAllocator);
8891 if(pUserData != VMA_NULL)
8893 m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (
const char*)pUserData);
8898 m_pUserData = pUserData;
8902 void VmaAllocation_T::ChangeBlockAllocation(
8904 VmaDeviceMemoryBlock* block,
8905 VkDeviceSize offset)
8907 VMA_ASSERT(block != VMA_NULL);
8908 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8911 if(block != m_BlockAllocation.m_Block)
8913 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
8914 if(IsPersistentMap())
8916 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
8917 block->Map(hAllocator, mapRefCount, VMA_NULL);
8920 m_BlockAllocation.m_Block = block;
8921 m_BlockAllocation.m_Offset = offset;
8924 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
8926 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8927 m_BlockAllocation.m_Offset = newOffset;
8930 VkDeviceSize VmaAllocation_T::GetOffset()
const
8934 case ALLOCATION_TYPE_BLOCK:
8935 return m_BlockAllocation.m_Offset;
8936 case ALLOCATION_TYPE_DEDICATED:
8944 VkDeviceMemory VmaAllocation_T::GetMemory()
const
8948 case ALLOCATION_TYPE_BLOCK:
8949 return m_BlockAllocation.m_Block->GetDeviceMemory();
8950 case ALLOCATION_TYPE_DEDICATED:
8951 return m_DedicatedAllocation.m_hMemory;
8954 return VK_NULL_HANDLE;
8958 void* VmaAllocation_T::GetMappedData()
const
8962 case ALLOCATION_TYPE_BLOCK:
8965 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
8966 VMA_ASSERT(pBlockData != VMA_NULL);
8967 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
8974 case ALLOCATION_TYPE_DEDICATED:
8975 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
8976 return m_DedicatedAllocation.m_pMappedData;
8983 bool VmaAllocation_T::CanBecomeLost()
const
8987 case ALLOCATION_TYPE_BLOCK:
8988 return m_BlockAllocation.m_CanBecomeLost;
8989 case ALLOCATION_TYPE_DEDICATED:
8997 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8999 VMA_ASSERT(CanBecomeLost());
9005 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
9008 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
9013 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
9019 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
9029 #if VMA_STATS_STRING_ENABLED
9032 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
9041 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const
9043 json.WriteString(
"Type");
9044 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
9046 json.WriteString(
"Size");
9047 json.WriteNumber(m_Size);
9049 if(m_pUserData != VMA_NULL)
9051 json.WriteString(
"UserData");
9052 if(IsUserDataString())
9054 json.WriteString((
const char*)m_pUserData);
9059 json.ContinueString_Pointer(m_pUserData);
9064 json.WriteString(
"CreationFrameIndex");
9065 json.WriteNumber(m_CreationFrameIndex);
9067 json.WriteString(
"LastUseFrameIndex");
9068 json.WriteNumber(GetLastUseFrameIndex());
9070 if(m_BufferImageUsage != 0)
9072 json.WriteString(
"Usage");
9073 json.WriteNumber(m_BufferImageUsage);
9079 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
9081 VMA_ASSERT(IsUserDataString());
9082 VmaFreeString(hAllocator->GetAllocationCallbacks(), (
char*)m_pUserData);
9083 m_pUserData = VMA_NULL;
9086 void VmaAllocation_T::BlockAllocMap()
9088 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
9090 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
9096 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
9100 void VmaAllocation_T::BlockAllocUnmap()
9102 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
9104 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
9110 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
9114 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
9116 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
9120 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
9122 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
9123 *ppData = m_DedicatedAllocation.m_pMappedData;
9129 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
9130 return VK_ERROR_MEMORY_MAP_FAILED;
9135 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9136 hAllocator->m_hDevice,
9137 m_DedicatedAllocation.m_hMemory,
9142 if(result == VK_SUCCESS)
9144 m_DedicatedAllocation.m_pMappedData = *ppData;
9151 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
9153 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
9155 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
9160 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
9161 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
9162 hAllocator->m_hDevice,
9163 m_DedicatedAllocation.m_hMemory);
9168 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
9172 #if VMA_STATS_STRING_ENABLED
9174 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
9178 json.WriteString(
"Blocks");
9181 json.WriteString(
"Allocations");
9184 json.WriteString(
"UnusedRanges");
9187 json.WriteString(
"UsedBytes");
9190 json.WriteString(
"UnusedBytes");
9195 json.WriteString(
"AllocationSize");
9196 json.BeginObject(
true);
9197 json.WriteString(
"Min");
9199 json.WriteString(
"Avg");
9201 json.WriteString(
"Max");
9208 json.WriteString(
"UnusedRangeSize");
9209 json.BeginObject(
true);
9210 json.WriteString(
"Min");
9212 json.WriteString(
"Avg");
9214 json.WriteString(
"Max");
9224 struct VmaSuballocationItemSizeLess
9227 const VmaSuballocationList::iterator lhs,
9228 const VmaSuballocationList::iterator rhs)
const
9230 return lhs->size < rhs->size;
9233 const VmaSuballocationList::iterator lhs,
9234 VkDeviceSize rhsSize)
const
9236 return lhs->size < rhsSize;
9244 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
9246 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
9250 #if VMA_STATS_STRING_ENABLED
9252 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
9253 VkDeviceSize unusedBytes,
9254 size_t allocationCount,
9255 size_t unusedRangeCount)
const
9259 json.WriteString(
"TotalBytes");
9260 json.WriteNumber(GetSize());
9262 json.WriteString(
"UnusedBytes");
9263 json.WriteNumber(unusedBytes);
9265 json.WriteString(
"Allocations");
9266 json.WriteNumber((uint64_t)allocationCount);
9268 json.WriteString(
"UnusedRanges");
9269 json.WriteNumber((uint64_t)unusedRangeCount);
9271 json.WriteString(
"Suballocations");
9275 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
9276 VkDeviceSize offset,
9279 json.BeginObject(
true);
9281 json.WriteString(
"Offset");
9282 json.WriteNumber(offset);
9284 hAllocation->PrintParameters(json);
9289 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
9290 VkDeviceSize offset,
9291 VkDeviceSize size)
const
9293 json.BeginObject(
true);
9295 json.WriteString(
"Offset");
9296 json.WriteNumber(offset);
9298 json.WriteString(
"Type");
9299 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
9301 json.WriteString(
"Size");
9302 json.WriteNumber(size);
9307 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const
9318 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
9319 VmaBlockMetadata(hAllocator),
9322 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9323 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
9327 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
9331 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
9333 VmaBlockMetadata::Init(size);
9336 m_SumFreeSize = size;
9338 VmaSuballocation suballoc = {};
9339 suballoc.offset = 0;
9340 suballoc.size = size;
9341 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9342 suballoc.hAllocation = VK_NULL_HANDLE;
9344 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9345 m_Suballocations.push_back(suballoc);
9346 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
9348 m_FreeSuballocationsBySize.push_back(suballocItem);
9351 bool VmaBlockMetadata_Generic::Validate()
const
9353 VMA_VALIDATE(!m_Suballocations.empty());
9356 VkDeviceSize calculatedOffset = 0;
9358 uint32_t calculatedFreeCount = 0;
9360 VkDeviceSize calculatedSumFreeSize = 0;
9363 size_t freeSuballocationsToRegister = 0;
9365 bool prevFree =
false;
9367 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9368 suballocItem != m_Suballocations.cend();
9371 const VmaSuballocation& subAlloc = *suballocItem;
9374 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
9376 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
9378 VMA_VALIDATE(!prevFree || !currFree);
9380 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
9384 calculatedSumFreeSize += subAlloc.size;
9385 ++calculatedFreeCount;
9386 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9388 ++freeSuballocationsToRegister;
9392 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
9396 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
9397 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
9400 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
9403 calculatedOffset += subAlloc.size;
9404 prevFree = currFree;
9409 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
9411 VkDeviceSize lastSize = 0;
9412 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
9414 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
9417 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
9419 VMA_VALIDATE(suballocItem->size >= lastSize);
9421 lastSize = suballocItem->size;
9425 VMA_VALIDATE(ValidateFreeSuballocationList());
9426 VMA_VALIDATE(calculatedOffset == GetSize());
9427 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
9428 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
9433 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const
9435 if(!m_FreeSuballocationsBySize.empty())
9437 return m_FreeSuballocationsBySize.back()->size;
9445 bool VmaBlockMetadata_Generic::IsEmpty()
const
9447 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
9450 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
9454 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9466 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9467 suballocItem != m_Suballocations.cend();
9470 const VmaSuballocation& suballoc = *suballocItem;
9471 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9484 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const
9486 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9488 inoutStats.
size += GetSize();
9495 #if VMA_STATS_STRING_ENABLED
9497 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const
9499 PrintDetailedMap_Begin(json,
9501 m_Suballocations.size() - (
size_t)m_FreeCount,
9505 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9506 suballocItem != m_Suballocations.cend();
9507 ++suballocItem, ++i)
9509 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9511 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
9515 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
9519 PrintDetailedMap_End(json);
9524 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
9525 uint32_t currentFrameIndex,
9526 uint32_t frameInUseCount,
9527 VkDeviceSize bufferImageGranularity,
9528 VkDeviceSize allocSize,
9529 VkDeviceSize allocAlignment,
9531 VmaSuballocationType allocType,
9532 bool canMakeOtherLost,
9534 VmaAllocationRequest* pAllocationRequest)
9536 VMA_ASSERT(allocSize > 0);
9537 VMA_ASSERT(!upperAddress);
9538 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9539 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9540 VMA_HEAVY_ASSERT(Validate());
9542 pAllocationRequest->type = VmaAllocationRequestType::Normal;
9545 if(canMakeOtherLost ==
false &&
9546 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
9552 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
9553 if(freeSuballocCount > 0)
9558 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
9559 m_FreeSuballocationsBySize.data(),
9560 m_FreeSuballocationsBySize.data() + freeSuballocCount,
9561 allocSize + 2 * VMA_DEBUG_MARGIN,
9562 VmaSuballocationItemSizeLess());
9563 size_t index = it - m_FreeSuballocationsBySize.data();
9564 for(; index < freeSuballocCount; ++index)
9569 bufferImageGranularity,
9573 m_FreeSuballocationsBySize[index],
9575 &pAllocationRequest->offset,
9576 &pAllocationRequest->itemsToMakeLostCount,
9577 &pAllocationRequest->sumFreeSize,
9578 &pAllocationRequest->sumItemSize))
9580 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
9585 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
9587 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9588 it != m_Suballocations.end();
9591 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
9594 bufferImageGranularity,
9600 &pAllocationRequest->offset,
9601 &pAllocationRequest->itemsToMakeLostCount,
9602 &pAllocationRequest->sumFreeSize,
9603 &pAllocationRequest->sumItemSize))
9605 pAllocationRequest->item = it;
9613 for(
size_t index = freeSuballocCount; index--; )
9618 bufferImageGranularity,
9622 m_FreeSuballocationsBySize[index],
9624 &pAllocationRequest->offset,
9625 &pAllocationRequest->itemsToMakeLostCount,
9626 &pAllocationRequest->sumFreeSize,
9627 &pAllocationRequest->sumItemSize))
9629 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
9636 if(canMakeOtherLost)
9641 VmaAllocationRequest tmpAllocRequest = {};
9642 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
9643 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
9644 suballocIt != m_Suballocations.end();
9647 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
9648 suballocIt->hAllocation->CanBecomeLost())
9653 bufferImageGranularity,
9659 &tmpAllocRequest.offset,
9660 &tmpAllocRequest.itemsToMakeLostCount,
9661 &tmpAllocRequest.sumFreeSize,
9662 &tmpAllocRequest.sumItemSize))
9666 *pAllocationRequest = tmpAllocRequest;
9667 pAllocationRequest->item = suballocIt;
9670 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
9672 *pAllocationRequest = tmpAllocRequest;
9673 pAllocationRequest->item = suballocIt;
9686 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
9687 uint32_t currentFrameIndex,
9688 uint32_t frameInUseCount,
9689 VmaAllocationRequest* pAllocationRequest)
9691 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
9693 while(pAllocationRequest->itemsToMakeLostCount > 0)
9695 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
9697 ++pAllocationRequest->item;
9699 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9700 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
9701 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
9702 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9704 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
9705 --pAllocationRequest->itemsToMakeLostCount;
9713 VMA_HEAVY_ASSERT(Validate());
9714 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9715 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
9720 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9722 uint32_t lostAllocationCount = 0;
9723 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9724 it != m_Suballocations.end();
9727 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
9728 it->hAllocation->CanBecomeLost() &&
9729 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9731 it = FreeSuballocation(it);
9732 ++lostAllocationCount;
9735 return lostAllocationCount;
9738 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
9740 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9741 it != m_Suballocations.end();
9744 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
9746 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
9748 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9749 return VK_ERROR_VALIDATION_FAILED_EXT;
9751 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
9753 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9754 return VK_ERROR_VALIDATION_FAILED_EXT;
9762 void VmaBlockMetadata_Generic::Alloc(
9763 const VmaAllocationRequest& request,
9764 VmaSuballocationType type,
9765 VkDeviceSize allocSize,
9768 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
9769 VMA_ASSERT(request.item != m_Suballocations.end());
9770 VmaSuballocation& suballoc = *request.item;
9772 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9774 VMA_ASSERT(request.offset >= suballoc.offset);
9775 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
9776 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
9777 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
9781 UnregisterFreeSuballocation(request.item);
9783 suballoc.offset = request.offset;
9784 suballoc.size = allocSize;
9785 suballoc.type = type;
9786 suballoc.hAllocation = hAllocation;
9791 VmaSuballocation paddingSuballoc = {};
9792 paddingSuballoc.offset = request.offset + allocSize;
9793 paddingSuballoc.size = paddingEnd;
9794 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9795 VmaSuballocationList::iterator next = request.item;
9797 const VmaSuballocationList::iterator paddingEndItem =
9798 m_Suballocations.insert(next, paddingSuballoc);
9799 RegisterFreeSuballocation(paddingEndItem);
9805 VmaSuballocation paddingSuballoc = {};
9806 paddingSuballoc.offset = request.offset - paddingBegin;
9807 paddingSuballoc.size = paddingBegin;
9808 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9809 const VmaSuballocationList::iterator paddingBeginItem =
9810 m_Suballocations.insert(request.item, paddingSuballoc);
9811 RegisterFreeSuballocation(paddingBeginItem);
9815 m_FreeCount = m_FreeCount - 1;
9816 if(paddingBegin > 0)
9824 m_SumFreeSize -= allocSize;
9827 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
9829 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9830 suballocItem != m_Suballocations.end();
9833 VmaSuballocation& suballoc = *suballocItem;
9834 if(suballoc.hAllocation == allocation)
9836 FreeSuballocation(suballocItem);
9837 VMA_HEAVY_ASSERT(Validate());
9841 VMA_ASSERT(0 &&
"Not found!");
9844 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
9846 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9847 suballocItem != m_Suballocations.end();
9850 VmaSuballocation& suballoc = *suballocItem;
9851 if(suballoc.offset == offset)
9853 FreeSuballocation(suballocItem);
9857 VMA_ASSERT(0 &&
"Not found!");
9860 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const
9862 VkDeviceSize lastSize = 0;
9863 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
9865 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
9867 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
9868 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9869 VMA_VALIDATE(it->size >= lastSize);
9870 lastSize = it->size;
9875 bool VmaBlockMetadata_Generic::CheckAllocation(
9876 uint32_t currentFrameIndex,
9877 uint32_t frameInUseCount,
9878 VkDeviceSize bufferImageGranularity,
9879 VkDeviceSize allocSize,
9880 VkDeviceSize allocAlignment,
9881 VmaSuballocationType allocType,
9882 VmaSuballocationList::const_iterator suballocItem,
9883 bool canMakeOtherLost,
9884 VkDeviceSize* pOffset,
9885 size_t* itemsToMakeLostCount,
9886 VkDeviceSize* pSumFreeSize,
9887 VkDeviceSize* pSumItemSize)
const
9889 VMA_ASSERT(allocSize > 0);
9890 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9891 VMA_ASSERT(suballocItem != m_Suballocations.cend());
9892 VMA_ASSERT(pOffset != VMA_NULL);
9894 *itemsToMakeLostCount = 0;
9898 if(canMakeOtherLost)
9900 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9902 *pSumFreeSize = suballocItem->size;
9906 if(suballocItem->hAllocation->CanBecomeLost() &&
9907 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9909 ++*itemsToMakeLostCount;
9910 *pSumItemSize = suballocItem->size;
9919 if(GetSize() - suballocItem->offset < allocSize)
9925 *pOffset = suballocItem->offset;
9928 if(VMA_DEBUG_MARGIN > 0)
9930 *pOffset += VMA_DEBUG_MARGIN;
9934 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
9938 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
9940 bool bufferImageGranularityConflict =
false;
9941 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
9942 while(prevSuballocItem != m_Suballocations.cbegin())
9945 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
9946 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
9948 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9950 bufferImageGranularityConflict =
true;
9958 if(bufferImageGranularityConflict)
9960 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
9966 if(*pOffset >= suballocItem->offset + suballocItem->size)
9972 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
9975 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
9977 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
9979 if(suballocItem->offset + totalSize > GetSize())
9986 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
9987 if(totalSize > suballocItem->size)
9989 VkDeviceSize remainingSize = totalSize - suballocItem->size;
9990 while(remainingSize > 0)
9993 if(lastSuballocItem == m_Suballocations.cend())
9997 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9999 *pSumFreeSize += lastSuballocItem->size;
10003 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
10004 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
10005 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10007 ++*itemsToMakeLostCount;
10008 *pSumItemSize += lastSuballocItem->size;
10015 remainingSize = (lastSuballocItem->size < remainingSize) ?
10016 remainingSize - lastSuballocItem->size : 0;
10022 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
10024 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
10025 ++nextSuballocItem;
10026 while(nextSuballocItem != m_Suballocations.cend())
10028 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
10029 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10031 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10033 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
10034 if(nextSuballoc.hAllocation->CanBecomeLost() &&
10035 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10037 ++*itemsToMakeLostCount;
10050 ++nextSuballocItem;
10056 const VmaSuballocation& suballoc = *suballocItem;
10057 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10059 *pSumFreeSize = suballoc.size;
10062 if(suballoc.size < allocSize)
10068 *pOffset = suballoc.offset;
10071 if(VMA_DEBUG_MARGIN > 0)
10073 *pOffset += VMA_DEBUG_MARGIN;
10077 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
10081 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
10083 bool bufferImageGranularityConflict =
false;
10084 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
10085 while(prevSuballocItem != m_Suballocations.cbegin())
10087 --prevSuballocItem;
10088 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
10089 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
10091 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10093 bufferImageGranularityConflict =
true;
10101 if(bufferImageGranularityConflict)
10103 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
10108 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
10111 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
10114 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
10121 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
10123 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
10124 ++nextSuballocItem;
10125 while(nextSuballocItem != m_Suballocations.cend())
10127 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
10128 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10130 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10140 ++nextSuballocItem;
10149 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
10151 VMA_ASSERT(item != m_Suballocations.end());
10152 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10154 VmaSuballocationList::iterator nextItem = item;
10156 VMA_ASSERT(nextItem != m_Suballocations.end());
10157 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
10159 item->size += nextItem->size;
10161 m_Suballocations.erase(nextItem);
10164 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
10167 VmaSuballocation& suballoc = *suballocItem;
10168 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10169 suballoc.hAllocation = VK_NULL_HANDLE;
10173 m_SumFreeSize += suballoc.size;
10176 bool mergeWithNext =
false;
10177 bool mergeWithPrev =
false;
10179 VmaSuballocationList::iterator nextItem = suballocItem;
10181 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
10183 mergeWithNext =
true;
10186 VmaSuballocationList::iterator prevItem = suballocItem;
10187 if(suballocItem != m_Suballocations.begin())
10190 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
10192 mergeWithPrev =
true;
10198 UnregisterFreeSuballocation(nextItem);
10199 MergeFreeWithNext(suballocItem);
10204 UnregisterFreeSuballocation(prevItem);
10205 MergeFreeWithNext(prevItem);
10206 RegisterFreeSuballocation(prevItem);
10211 RegisterFreeSuballocation(suballocItem);
10212 return suballocItem;
10216 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
10218 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10219 VMA_ASSERT(item->size > 0);
10223 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
10225 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
10227 if(m_FreeSuballocationsBySize.empty())
10229 m_FreeSuballocationsBySize.push_back(item);
10233 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
10241 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
10243 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10244 VMA_ASSERT(item->size > 0);
10248 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
10250 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
10252 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
10253 m_FreeSuballocationsBySize.data(),
10254 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
10256 VmaSuballocationItemSizeLess());
10257 for(
size_t index = it - m_FreeSuballocationsBySize.data();
10258 index < m_FreeSuballocationsBySize.size();
10261 if(m_FreeSuballocationsBySize[index] == item)
10263 VmaVectorRemove(m_FreeSuballocationsBySize, index);
10266 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
10268 VMA_ASSERT(0 &&
"Not found.");
10274 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
10275 VkDeviceSize bufferImageGranularity,
10276 VmaSuballocationType& inOutPrevSuballocType)
const
10278 if(bufferImageGranularity == 1 || IsEmpty())
10283 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
10284 bool typeConflictFound =
false;
10285 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
10286 it != m_Suballocations.cend();
10289 const VmaSuballocationType suballocType = it->type;
10290 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
10292 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
10293 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
10295 typeConflictFound =
true;
10297 inOutPrevSuballocType = suballocType;
10301 return typeConflictFound || minAlignment >= bufferImageGranularity;
10307 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
10308 VmaBlockMetadata(hAllocator),
10310 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10311 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10312 m_1stVectorIndex(0),
10313 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
10314 m_1stNullItemsBeginCount(0),
10315 m_1stNullItemsMiddleCount(0),
10316 m_2ndNullItemsCount(0)
10320 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
10324 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
10326 VmaBlockMetadata::Init(size);
10327 m_SumFreeSize = size;
10330 bool VmaBlockMetadata_Linear::Validate()
const
10332 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10333 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10335 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
10336 VMA_VALIDATE(!suballocations1st.empty() ||
10337 suballocations2nd.empty() ||
10338 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
10340 if(!suballocations1st.empty())
10343 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
10345 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
10347 if(!suballocations2nd.empty())
10350 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
10353 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
10354 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
10356 VkDeviceSize sumUsedSize = 0;
10357 const size_t suballoc1stCount = suballocations1st.size();
10358 VkDeviceSize offset = VMA_DEBUG_MARGIN;
10360 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10362 const size_t suballoc2ndCount = suballocations2nd.size();
10363 size_t nullItem2ndCount = 0;
10364 for(
size_t i = 0; i < suballoc2ndCount; ++i)
10366 const VmaSuballocation& suballoc = suballocations2nd[i];
10367 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10369 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10370 VMA_VALIDATE(suballoc.offset >= offset);
10374 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10375 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10376 sumUsedSize += suballoc.size;
10380 ++nullItem2ndCount;
10383 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10386 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
10389 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
10391 const VmaSuballocation& suballoc = suballocations1st[i];
10392 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
10393 suballoc.hAllocation == VK_NULL_HANDLE);
10396 size_t nullItem1stCount = m_1stNullItemsBeginCount;
10398 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
10400 const VmaSuballocation& suballoc = suballocations1st[i];
10401 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10403 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10404 VMA_VALIDATE(suballoc.offset >= offset);
10405 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
10409 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10410 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10411 sumUsedSize += suballoc.size;
10415 ++nullItem1stCount;
10418 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10420 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
10422 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10424 const size_t suballoc2ndCount = suballocations2nd.size();
10425 size_t nullItem2ndCount = 0;
10426 for(
size_t i = suballoc2ndCount; i--; )
10428 const VmaSuballocation& suballoc = suballocations2nd[i];
10429 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10431 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10432 VMA_VALIDATE(suballoc.offset >= offset);
10436 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10437 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10438 sumUsedSize += suballoc.size;
10442 ++nullItem2ndCount;
10445 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10448 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
10451 VMA_VALIDATE(offset <= GetSize());
10452 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
10457 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const
10459 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
10460 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
10463 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const
10465 const VkDeviceSize size = GetSize();
10477 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10479 switch(m_2ndVectorMode)
10481 case SECOND_VECTOR_EMPTY:
10487 const size_t suballocations1stCount = suballocations1st.size();
10488 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
10489 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10490 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
10492 firstSuballoc.offset,
10493 size - (lastSuballoc.offset + lastSuballoc.size));
10497 case SECOND_VECTOR_RING_BUFFER:
10502 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10503 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
10504 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
10505 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
10509 case SECOND_VECTOR_DOUBLE_STACK:
10514 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10515 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
10516 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
10517 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
10527 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
10529 const VkDeviceSize size = GetSize();
10530 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10531 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10532 const size_t suballoc1stCount = suballocations1st.size();
10533 const size_t suballoc2ndCount = suballocations2nd.size();
10544 VkDeviceSize lastOffset = 0;
10546 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10548 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10549 size_t nextAlloc2ndIndex = 0;
10550 while(lastOffset < freeSpace2ndTo1stEnd)
10553 while(nextAlloc2ndIndex < suballoc2ndCount &&
10554 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10556 ++nextAlloc2ndIndex;
10560 if(nextAlloc2ndIndex < suballoc2ndCount)
10562 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10565 if(lastOffset < suballoc.offset)
10568 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10582 lastOffset = suballoc.offset + suballoc.size;
10583 ++nextAlloc2ndIndex;
10589 if(lastOffset < freeSpace2ndTo1stEnd)
10591 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10599 lastOffset = freeSpace2ndTo1stEnd;
10604 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10605 const VkDeviceSize freeSpace1stTo2ndEnd =
10606 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10607 while(lastOffset < freeSpace1stTo2ndEnd)
10610 while(nextAlloc1stIndex < suballoc1stCount &&
10611 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10613 ++nextAlloc1stIndex;
10617 if(nextAlloc1stIndex < suballoc1stCount)
10619 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10622 if(lastOffset < suballoc.offset)
10625 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10639 lastOffset = suballoc.offset + suballoc.size;
10640 ++nextAlloc1stIndex;
10646 if(lastOffset < freeSpace1stTo2ndEnd)
10648 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10656 lastOffset = freeSpace1stTo2ndEnd;
10660 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10662 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10663 while(lastOffset < size)
10666 while(nextAlloc2ndIndex != SIZE_MAX &&
10667 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10669 --nextAlloc2ndIndex;
10673 if(nextAlloc2ndIndex != SIZE_MAX)
10675 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10678 if(lastOffset < suballoc.offset)
10681 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10695 lastOffset = suballoc.offset + suballoc.size;
10696 --nextAlloc2ndIndex;
10702 if(lastOffset < size)
10704 const VkDeviceSize unusedRangeSize = size - lastOffset;
10720 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const
10722 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10723 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10724 const VkDeviceSize size = GetSize();
10725 const size_t suballoc1stCount = suballocations1st.size();
10726 const size_t suballoc2ndCount = suballocations2nd.size();
10728 inoutStats.
size += size;
10730 VkDeviceSize lastOffset = 0;
10732 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10734 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10735 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
10736 while(lastOffset < freeSpace2ndTo1stEnd)
10739 while(nextAlloc2ndIndex < suballoc2ndCount &&
10740 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10742 ++nextAlloc2ndIndex;
10746 if(nextAlloc2ndIndex < suballoc2ndCount)
10748 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10751 if(lastOffset < suballoc.offset)
10754 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10765 lastOffset = suballoc.offset + suballoc.size;
10766 ++nextAlloc2ndIndex;
10771 if(lastOffset < freeSpace2ndTo1stEnd)
10774 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10781 lastOffset = freeSpace2ndTo1stEnd;
10786 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10787 const VkDeviceSize freeSpace1stTo2ndEnd =
10788 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10789 while(lastOffset < freeSpace1stTo2ndEnd)
10792 while(nextAlloc1stIndex < suballoc1stCount &&
10793 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10795 ++nextAlloc1stIndex;
10799 if(nextAlloc1stIndex < suballoc1stCount)
10801 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10804 if(lastOffset < suballoc.offset)
10807 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10818 lastOffset = suballoc.offset + suballoc.size;
10819 ++nextAlloc1stIndex;
10824 if(lastOffset < freeSpace1stTo2ndEnd)
10827 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10834 lastOffset = freeSpace1stTo2ndEnd;
10838 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10840 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10841 while(lastOffset < size)
10844 while(nextAlloc2ndIndex != SIZE_MAX &&
10845 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10847 --nextAlloc2ndIndex;
10851 if(nextAlloc2ndIndex != SIZE_MAX)
10853 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10856 if(lastOffset < suballoc.offset)
10859 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10870 lastOffset = suballoc.offset + suballoc.size;
10871 --nextAlloc2ndIndex;
10876 if(lastOffset < size)
10879 const VkDeviceSize unusedRangeSize = size - lastOffset;
10892 #if VMA_STATS_STRING_ENABLED
10893 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const
10895 const VkDeviceSize size = GetSize();
10896 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10897 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10898 const size_t suballoc1stCount = suballocations1st.size();
10899 const size_t suballoc2ndCount = suballocations2nd.size();
10903 size_t unusedRangeCount = 0;
10904 VkDeviceSize usedBytes = 0;
10906 VkDeviceSize lastOffset = 0;
10908 size_t alloc2ndCount = 0;
10909 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10911 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10912 size_t nextAlloc2ndIndex = 0;
10913 while(lastOffset < freeSpace2ndTo1stEnd)
10916 while(nextAlloc2ndIndex < suballoc2ndCount &&
10917 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10919 ++nextAlloc2ndIndex;
10923 if(nextAlloc2ndIndex < suballoc2ndCount)
10925 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10928 if(lastOffset < suballoc.offset)
10931 ++unusedRangeCount;
10937 usedBytes += suballoc.size;
10940 lastOffset = suballoc.offset + suballoc.size;
10941 ++nextAlloc2ndIndex;
10946 if(lastOffset < freeSpace2ndTo1stEnd)
10949 ++unusedRangeCount;
10953 lastOffset = freeSpace2ndTo1stEnd;
10958 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10959 size_t alloc1stCount = 0;
10960 const VkDeviceSize freeSpace1stTo2ndEnd =
10961 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10962 while(lastOffset < freeSpace1stTo2ndEnd)
10965 while(nextAlloc1stIndex < suballoc1stCount &&
10966 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10968 ++nextAlloc1stIndex;
10972 if(nextAlloc1stIndex < suballoc1stCount)
10974 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10977 if(lastOffset < suballoc.offset)
10980 ++unusedRangeCount;
10986 usedBytes += suballoc.size;
10989 lastOffset = suballoc.offset + suballoc.size;
10990 ++nextAlloc1stIndex;
10995 if(lastOffset < size)
10998 ++unusedRangeCount;
11002 lastOffset = freeSpace1stTo2ndEnd;
11006 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11008 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
11009 while(lastOffset < size)
11012 while(nextAlloc2ndIndex != SIZE_MAX &&
11013 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11015 --nextAlloc2ndIndex;
11019 if(nextAlloc2ndIndex != SIZE_MAX)
11021 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11024 if(lastOffset < suballoc.offset)
11027 ++unusedRangeCount;
11033 usedBytes += suballoc.size;
11036 lastOffset = suballoc.offset + suballoc.size;
11037 --nextAlloc2ndIndex;
11042 if(lastOffset < size)
11045 ++unusedRangeCount;
11054 const VkDeviceSize unusedBytes = size - usedBytes;
11055 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
11060 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11062 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
11063 size_t nextAlloc2ndIndex = 0;
11064 while(lastOffset < freeSpace2ndTo1stEnd)
11067 while(nextAlloc2ndIndex < suballoc2ndCount &&
11068 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11070 ++nextAlloc2ndIndex;
11074 if(nextAlloc2ndIndex < suballoc2ndCount)
11076 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11079 if(lastOffset < suballoc.offset)
11082 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11083 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11088 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11091 lastOffset = suballoc.offset + suballoc.size;
11092 ++nextAlloc2ndIndex;
11097 if(lastOffset < freeSpace2ndTo1stEnd)
11100 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
11101 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11105 lastOffset = freeSpace2ndTo1stEnd;
11110 nextAlloc1stIndex = m_1stNullItemsBeginCount;
11111 while(lastOffset < freeSpace1stTo2ndEnd)
11114 while(nextAlloc1stIndex < suballoc1stCount &&
11115 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
11117 ++nextAlloc1stIndex;
11121 if(nextAlloc1stIndex < suballoc1stCount)
11123 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
11126 if(lastOffset < suballoc.offset)
11129 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11130 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11135 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11138 lastOffset = suballoc.offset + suballoc.size;
11139 ++nextAlloc1stIndex;
11144 if(lastOffset < freeSpace1stTo2ndEnd)
11147 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
11148 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11152 lastOffset = freeSpace1stTo2ndEnd;
11156 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11158 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
11159 while(lastOffset < size)
11162 while(nextAlloc2ndIndex != SIZE_MAX &&
11163 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11165 --nextAlloc2ndIndex;
11169 if(nextAlloc2ndIndex != SIZE_MAX)
11171 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11174 if(lastOffset < suballoc.offset)
11177 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11178 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11183 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11186 lastOffset = suballoc.offset + suballoc.size;
11187 --nextAlloc2ndIndex;
11192 if(lastOffset < size)
11195 const VkDeviceSize unusedRangeSize = size - lastOffset;
11196 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11205 PrintDetailedMap_End(json);
11209 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
11210 uint32_t currentFrameIndex,
11211 uint32_t frameInUseCount,
11212 VkDeviceSize bufferImageGranularity,
11213 VkDeviceSize allocSize,
11214 VkDeviceSize allocAlignment,
11216 VmaSuballocationType allocType,
11217 bool canMakeOtherLost,
11219 VmaAllocationRequest* pAllocationRequest)
11221 VMA_ASSERT(allocSize > 0);
11222 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
11223 VMA_ASSERT(pAllocationRequest != VMA_NULL);
11224 VMA_HEAVY_ASSERT(Validate());
11225 return upperAddress ?
11226 CreateAllocationRequest_UpperAddress(
11227 currentFrameIndex, frameInUseCount, bufferImageGranularity,
11228 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
11229 CreateAllocationRequest_LowerAddress(
11230 currentFrameIndex, frameInUseCount, bufferImageGranularity,
11231 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
11234 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
11235 uint32_t currentFrameIndex,
11236 uint32_t frameInUseCount,
11237 VkDeviceSize bufferImageGranularity,
11238 VkDeviceSize allocSize,
11239 VkDeviceSize allocAlignment,
11240 VmaSuballocationType allocType,
11241 bool canMakeOtherLost,
11243 VmaAllocationRequest* pAllocationRequest)
11245 const VkDeviceSize size = GetSize();
11246 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11247 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11249 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11251 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
11256 if(allocSize > size)
11260 VkDeviceSize resultBaseOffset = size - allocSize;
11261 if(!suballocations2nd.empty())
11263 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11264 resultBaseOffset = lastSuballoc.offset - allocSize;
11265 if(allocSize > lastSuballoc.offset)
11272 VkDeviceSize resultOffset = resultBaseOffset;
11275 if(VMA_DEBUG_MARGIN > 0)
11277 if(resultOffset < VMA_DEBUG_MARGIN)
11281 resultOffset -= VMA_DEBUG_MARGIN;
11285 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
11289 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
11291 bool bufferImageGranularityConflict =
false;
11292 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11294 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11295 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11297 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
11299 bufferImageGranularityConflict =
true;
11307 if(bufferImageGranularityConflict)
11309 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
11314 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
11315 suballocations1st.back().offset + suballocations1st.back().size :
11317 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
11321 if(bufferImageGranularity > 1)
11323 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11325 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11326 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11328 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
11342 pAllocationRequest->offset = resultOffset;
11343 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
11344 pAllocationRequest->sumItemSize = 0;
11346 pAllocationRequest->itemsToMakeLostCount = 0;
11347 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
11354 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
11355 uint32_t currentFrameIndex,
11356 uint32_t frameInUseCount,
11357 VkDeviceSize bufferImageGranularity,
11358 VkDeviceSize allocSize,
11359 VkDeviceSize allocAlignment,
11360 VmaSuballocationType allocType,
11361 bool canMakeOtherLost,
11363 VmaAllocationRequest* pAllocationRequest)
11365 const VkDeviceSize size = GetSize();
11366 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11367 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11369 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11373 VkDeviceSize resultBaseOffset = 0;
11374 if(!suballocations1st.empty())
11376 const VmaSuballocation& lastSuballoc = suballocations1st.back();
11377 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11381 VkDeviceSize resultOffset = resultBaseOffset;
11384 if(VMA_DEBUG_MARGIN > 0)
11386 resultOffset += VMA_DEBUG_MARGIN;
11390 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
11394 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty())
11396 bool bufferImageGranularityConflict =
false;
11397 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11399 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11400 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11402 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11404 bufferImageGranularityConflict =
true;
11412 if(bufferImageGranularityConflict)
11414 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
11418 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
11419 suballocations2nd.back().offset : size;
11422 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
11426 if((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11428 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11430 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11431 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11433 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
11447 pAllocationRequest->offset = resultOffset;
11448 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
11449 pAllocationRequest->sumItemSize = 0;
11451 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
11452 pAllocationRequest->itemsToMakeLostCount = 0;
11459 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11461 VMA_ASSERT(!suballocations1st.empty());
11463 VkDeviceSize resultBaseOffset = 0;
11464 if(!suballocations2nd.empty())
11466 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11467 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11471 VkDeviceSize resultOffset = resultBaseOffset;
11474 if(VMA_DEBUG_MARGIN > 0)
11476 resultOffset += VMA_DEBUG_MARGIN;
11480 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
11484 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
11486 bool bufferImageGranularityConflict =
false;
11487 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
11489 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
11490 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11492 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11494 bufferImageGranularityConflict =
true;
11502 if(bufferImageGranularityConflict)
11504 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
11508 pAllocationRequest->itemsToMakeLostCount = 0;
11509 pAllocationRequest->sumItemSize = 0;
11510 size_t index1st = m_1stNullItemsBeginCount;
11512 if(canMakeOtherLost)
11514 while(index1st < suballocations1st.size() &&
11515 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
11518 const VmaSuballocation& suballoc = suballocations1st[index1st];
11519 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
11525 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11526 if(suballoc.hAllocation->CanBecomeLost() &&
11527 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11529 ++pAllocationRequest->itemsToMakeLostCount;
11530 pAllocationRequest->sumItemSize += suballoc.size;
11542 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
11544 while(index1st < suballocations1st.size())
11546 const VmaSuballocation& suballoc = suballocations1st[index1st];
11547 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
11549 if(suballoc.hAllocation != VK_NULL_HANDLE)
11552 if(suballoc.hAllocation->CanBecomeLost() &&
11553 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11555 ++pAllocationRequest->itemsToMakeLostCount;
11556 pAllocationRequest->sumItemSize += suballoc.size;
11574 if(index1st == suballocations1st.size() &&
11575 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
11578 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
11583 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
11584 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
11588 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
11590 for(
size_t nextSuballocIndex = index1st;
11591 nextSuballocIndex < suballocations1st.size();
11592 nextSuballocIndex++)
11594 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
11595 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11597 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
11611 pAllocationRequest->offset = resultOffset;
11612 pAllocationRequest->sumFreeSize =
11613 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
11615 - pAllocationRequest->sumItemSize;
11616 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
11625 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
11626 uint32_t currentFrameIndex,
11627 uint32_t frameInUseCount,
11628 VmaAllocationRequest* pAllocationRequest)
11630 if(pAllocationRequest->itemsToMakeLostCount == 0)
11635 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
11638 SuballocationVectorType* suballocations = &AccessSuballocations1st();
11639 size_t index = m_1stNullItemsBeginCount;
11640 size_t madeLostCount = 0;
11641 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
11643 if(index == suballocations->size())
11647 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11649 suballocations = &AccessSuballocations2nd();
11653 VMA_ASSERT(!suballocations->empty());
11655 VmaSuballocation& suballoc = (*suballocations)[index];
11656 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11658 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11659 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
11660 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11662 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11663 suballoc.hAllocation = VK_NULL_HANDLE;
11664 m_SumFreeSize += suballoc.size;
11665 if(suballocations == &AccessSuballocations1st())
11667 ++m_1stNullItemsMiddleCount;
11671 ++m_2ndNullItemsCount;
11683 CleanupAfterFree();
11689 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
11691 uint32_t lostAllocationCount = 0;
11693 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11694 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11696 VmaSuballocation& suballoc = suballocations1st[i];
11697 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11698 suballoc.hAllocation->CanBecomeLost() &&
11699 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11701 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11702 suballoc.hAllocation = VK_NULL_HANDLE;
11703 ++m_1stNullItemsMiddleCount;
11704 m_SumFreeSize += suballoc.size;
11705 ++lostAllocationCount;
11709 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11710 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11712 VmaSuballocation& suballoc = suballocations2nd[i];
11713 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11714 suballoc.hAllocation->CanBecomeLost() &&
11715 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11717 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11718 suballoc.hAllocation = VK_NULL_HANDLE;
11719 ++m_2ndNullItemsCount;
11720 m_SumFreeSize += suballoc.size;
11721 ++lostAllocationCount;
11725 if(lostAllocationCount)
11727 CleanupAfterFree();
11730 return lostAllocationCount;
11733 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
11735 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11736 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11738 const VmaSuballocation& suballoc = suballocations1st[i];
11739 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11741 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11743 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11744 return VK_ERROR_VALIDATION_FAILED_EXT;
11746 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11748 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11749 return VK_ERROR_VALIDATION_FAILED_EXT;
11754 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11755 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11757 const VmaSuballocation& suballoc = suballocations2nd[i];
11758 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11760 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11762 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11763 return VK_ERROR_VALIDATION_FAILED_EXT;
11765 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11767 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11768 return VK_ERROR_VALIDATION_FAILED_EXT;
11776 void VmaBlockMetadata_Linear::Alloc(
11777 const VmaAllocationRequest& request,
11778 VmaSuballocationType type,
11779 VkDeviceSize allocSize,
11782 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
11784 switch(request.type)
11786 case VmaAllocationRequestType::UpperAddress:
11788 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
11789 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
11790 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11791 suballocations2nd.push_back(newSuballoc);
11792 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
11795 case VmaAllocationRequestType::EndOf1st:
11797 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11799 VMA_ASSERT(suballocations1st.empty() ||
11800 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
11802 VMA_ASSERT(request.offset + allocSize <= GetSize());
11804 suballocations1st.push_back(newSuballoc);
11807 case VmaAllocationRequestType::EndOf2nd:
11809 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11811 VMA_ASSERT(!suballocations1st.empty() &&
11812 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
11813 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11815 switch(m_2ndVectorMode)
11817 case SECOND_VECTOR_EMPTY:
11819 VMA_ASSERT(suballocations2nd.empty());
11820 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
11822 case SECOND_VECTOR_RING_BUFFER:
11824 VMA_ASSERT(!suballocations2nd.empty());
11826 case SECOND_VECTOR_DOUBLE_STACK:
11827 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
11833 suballocations2nd.push_back(newSuballoc);
11837 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
11840 m_SumFreeSize -= newSuballoc.size;
11843 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
11845 FreeAtOffset(allocation->GetOffset());
11848 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
11850 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11851 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11853 if(!suballocations1st.empty())
11856 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
11857 if(firstSuballoc.offset == offset)
11859 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11860 firstSuballoc.hAllocation = VK_NULL_HANDLE;
11861 m_SumFreeSize += firstSuballoc.size;
11862 ++m_1stNullItemsBeginCount;
11863 CleanupAfterFree();
11869 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
11870 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11872 VmaSuballocation& lastSuballoc = suballocations2nd.back();
11873 if(lastSuballoc.offset == offset)
11875 m_SumFreeSize += lastSuballoc.size;
11876 suballocations2nd.pop_back();
11877 CleanupAfterFree();
11882 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
11884 VmaSuballocation& lastSuballoc = suballocations1st.back();
11885 if(lastSuballoc.offset == offset)
11887 m_SumFreeSize += lastSuballoc.size;
11888 suballocations1st.pop_back();
11889 CleanupAfterFree();
11896 VmaSuballocation refSuballoc;
11897 refSuballoc.offset = offset;
11899 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
11900 suballocations1st.begin() + m_1stNullItemsBeginCount,
11901 suballocations1st.end(),
11903 VmaSuballocationOffsetLess());
11904 if(it != suballocations1st.end())
11906 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11907 it->hAllocation = VK_NULL_HANDLE;
11908 ++m_1stNullItemsMiddleCount;
11909 m_SumFreeSize += it->size;
11910 CleanupAfterFree();
11915 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
11918 VmaSuballocation refSuballoc;
11919 refSuballoc.offset = offset;
11921 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
11922 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
11923 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
11924 if(it != suballocations2nd.end())
11926 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11927 it->hAllocation = VK_NULL_HANDLE;
11928 ++m_2ndNullItemsCount;
11929 m_SumFreeSize += it->size;
11930 CleanupAfterFree();
11935 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
11938 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const
11940 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11941 const size_t suballocCount = AccessSuballocations1st().size();
11942 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
11945 void VmaBlockMetadata_Linear::CleanupAfterFree()
11947 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11948 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11952 suballocations1st.clear();
11953 suballocations2nd.clear();
11954 m_1stNullItemsBeginCount = 0;
11955 m_1stNullItemsMiddleCount = 0;
11956 m_2ndNullItemsCount = 0;
11957 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11961 const size_t suballoc1stCount = suballocations1st.size();
11962 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11963 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
11966 while(m_1stNullItemsBeginCount < suballoc1stCount &&
11967 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
11969 ++m_1stNullItemsBeginCount;
11970 --m_1stNullItemsMiddleCount;
11974 while(m_1stNullItemsMiddleCount > 0 &&
11975 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
11977 --m_1stNullItemsMiddleCount;
11978 suballocations1st.pop_back();
11982 while(m_2ndNullItemsCount > 0 &&
11983 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
11985 --m_2ndNullItemsCount;
11986 suballocations2nd.pop_back();
11990 while(m_2ndNullItemsCount > 0 &&
11991 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
11993 --m_2ndNullItemsCount;
11994 VmaVectorRemove(suballocations2nd, 0);
11997 if(ShouldCompact1st())
11999 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
12000 size_t srcIndex = m_1stNullItemsBeginCount;
12001 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
12003 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
12007 if(dstIndex != srcIndex)
12009 suballocations1st[dstIndex] = suballocations1st[srcIndex];
12013 suballocations1st.resize(nonNullItemCount);
12014 m_1stNullItemsBeginCount = 0;
12015 m_1stNullItemsMiddleCount = 0;
12019 if(suballocations2nd.empty())
12021 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
12025 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
12027 suballocations1st.clear();
12028 m_1stNullItemsBeginCount = 0;
12030 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
12033 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
12034 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
12035 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
12036 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
12038 ++m_1stNullItemsBeginCount;
12039 --m_1stNullItemsMiddleCount;
12041 m_2ndNullItemsCount = 0;
12042 m_1stVectorIndex ^= 1;
12047 VMA_HEAVY_ASSERT(Validate());
12054 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
12055 VmaBlockMetadata(hAllocator),
12057 m_AllocationCount(0),
12061 memset(m_FreeList, 0,
sizeof(m_FreeList));
12064 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
12066 DeleteNode(m_Root);
12069 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
12071 VmaBlockMetadata::Init(size);
12073 m_UsableSize = VmaPrevPow2(size);
12074 m_SumFreeSize = m_UsableSize;
12078 while(m_LevelCount < MAX_LEVELS &&
12079 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
12084 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
12085 rootNode->offset = 0;
12086 rootNode->type = Node::TYPE_FREE;
12087 rootNode->parent = VMA_NULL;
12088 rootNode->buddy = VMA_NULL;
12091 AddToFreeListFront(0, rootNode);
12094 bool VmaBlockMetadata_Buddy::Validate()
const
12097 ValidationContext ctx;
12098 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
12100 VMA_VALIDATE(
false &&
"ValidateNode failed.");
12102 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
12103 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
12106 for(uint32_t level = 0; level < m_LevelCount; ++level)
12108 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
12109 m_FreeList[level].front->free.prev == VMA_NULL);
12111 for(Node* node = m_FreeList[level].front;
12113 node = node->free.next)
12115 VMA_VALIDATE(node->type == Node::TYPE_FREE);
12117 if(node->free.next == VMA_NULL)
12119 VMA_VALIDATE(m_FreeList[level].back == node);
12123 VMA_VALIDATE(node->free.next->free.prev == node);
12129 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
12131 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
12137 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const
12139 for(uint32_t level = 0; level < m_LevelCount; ++level)
12141 if(m_FreeList[level].front != VMA_NULL)
12143 return LevelToNodeSize(level);
12149 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
12151 const VkDeviceSize unusableSize = GetUnusableSize();
12162 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
12164 if(unusableSize > 0)
12173 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const
12175 const VkDeviceSize unusableSize = GetUnusableSize();
12177 inoutStats.
size += GetSize();
12178 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
12183 if(unusableSize > 0)
12190 #if VMA_STATS_STRING_ENABLED
12192 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const
12196 CalcAllocationStatInfo(stat);
12198 PrintDetailedMap_Begin(
12204 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
12206 const VkDeviceSize unusableSize = GetUnusableSize();
12207 if(unusableSize > 0)
12209 PrintDetailedMap_UnusedRange(json,
12214 PrintDetailedMap_End(json);
12219 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
12220 uint32_t currentFrameIndex,
12221 uint32_t frameInUseCount,
12222 VkDeviceSize bufferImageGranularity,
12223 VkDeviceSize allocSize,
12224 VkDeviceSize allocAlignment,
12226 VmaSuballocationType allocType,
12227 bool canMakeOtherLost,
12229 VmaAllocationRequest* pAllocationRequest)
12231 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
12235 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
12236 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
12237 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
12239 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
12240 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
12243 if(allocSize > m_UsableSize)
12248 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
12249 for(uint32_t level = targetLevel + 1; level--; )
12251 for(Node* freeNode = m_FreeList[level].front;
12252 freeNode != VMA_NULL;
12253 freeNode = freeNode->free.next)
12255 if(freeNode->offset % allocAlignment == 0)
12257 pAllocationRequest->type = VmaAllocationRequestType::Normal;
12258 pAllocationRequest->offset = freeNode->offset;
12259 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
12260 pAllocationRequest->sumItemSize = 0;
12261 pAllocationRequest->itemsToMakeLostCount = 0;
12262 pAllocationRequest->customData = (
void*)(uintptr_t)level;
12271 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
12272 uint32_t currentFrameIndex,
12273 uint32_t frameInUseCount,
12274 VmaAllocationRequest* pAllocationRequest)
12280 return pAllocationRequest->itemsToMakeLostCount == 0;
12283 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
12292 void VmaBlockMetadata_Buddy::Alloc(
12293 const VmaAllocationRequest& request,
12294 VmaSuballocationType type,
12295 VkDeviceSize allocSize,
12298 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
12300 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
12301 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
12303 Node* currNode = m_FreeList[currLevel].front;
12304 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
12305 while(currNode->offset != request.offset)
12307 currNode = currNode->free.next;
12308 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
12312 while(currLevel < targetLevel)
12316 RemoveFromFreeList(currLevel, currNode);
12318 const uint32_t childrenLevel = currLevel + 1;
12321 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
12322 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
12324 leftChild->offset = currNode->offset;
12325 leftChild->type = Node::TYPE_FREE;
12326 leftChild->parent = currNode;
12327 leftChild->buddy = rightChild;
12329 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
12330 rightChild->type = Node::TYPE_FREE;
12331 rightChild->parent = currNode;
12332 rightChild->buddy = leftChild;
12335 currNode->type = Node::TYPE_SPLIT;
12336 currNode->split.leftChild = leftChild;
12339 AddToFreeListFront(childrenLevel, rightChild);
12340 AddToFreeListFront(childrenLevel, leftChild);
12345 currNode = m_FreeList[currLevel].front;
12354 VMA_ASSERT(currLevel == targetLevel &&
12355 currNode != VMA_NULL &&
12356 currNode->type == Node::TYPE_FREE);
12357 RemoveFromFreeList(currLevel, currNode);
12360 currNode->type = Node::TYPE_ALLOCATION;
12361 currNode->allocation.alloc = hAllocation;
12363 ++m_AllocationCount;
12365 m_SumFreeSize -= allocSize;
12368 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
12370 if(node->type == Node::TYPE_SPLIT)
12372 DeleteNode(node->split.leftChild->buddy);
12373 DeleteNode(node->split.leftChild);
12376 vma_delete(GetAllocationCallbacks(), node);
12379 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const
12381 VMA_VALIDATE(level < m_LevelCount);
12382 VMA_VALIDATE(curr->parent == parent);
12383 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
12384 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
12387 case Node::TYPE_FREE:
12389 ctx.calculatedSumFreeSize += levelNodeSize;
12390 ++ctx.calculatedFreeCount;
12392 case Node::TYPE_ALLOCATION:
12393 ++ctx.calculatedAllocationCount;
12394 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
12395 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
12397 case Node::TYPE_SPLIT:
12399 const uint32_t childrenLevel = level + 1;
12400 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
12401 const Node*
const leftChild = curr->split.leftChild;
12402 VMA_VALIDATE(leftChild != VMA_NULL);
12403 VMA_VALIDATE(leftChild->offset == curr->offset);
12404 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
12406 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
12408 const Node*
const rightChild = leftChild->buddy;
12409 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
12410 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
12412 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
12423 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const
12426 uint32_t level = 0;
12427 VkDeviceSize currLevelNodeSize = m_UsableSize;
12428 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
12429 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
12432 currLevelNodeSize = nextLevelNodeSize;
12433 nextLevelNodeSize = currLevelNodeSize >> 1;
12438 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
12441 Node* node = m_Root;
12442 VkDeviceSize nodeOffset = 0;
12443 uint32_t level = 0;
12444 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
12445 while(node->type == Node::TYPE_SPLIT)
12447 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
12448 if(offset < nodeOffset + nextLevelSize)
12450 node = node->split.leftChild;
12454 node = node->split.leftChild->buddy;
12455 nodeOffset += nextLevelSize;
12458 levelNodeSize = nextLevelSize;
12461 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
12462 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
12465 --m_AllocationCount;
12466 m_SumFreeSize += alloc->GetSize();
12468 node->type = Node::TYPE_FREE;
12471 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
12473 RemoveFromFreeList(level, node->buddy);
12474 Node*
const parent = node->parent;
12476 vma_delete(GetAllocationCallbacks(), node->buddy);
12477 vma_delete(GetAllocationCallbacks(), node);
12478 parent->type = Node::TYPE_FREE;
12486 AddToFreeListFront(level, node);
12489 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const
12493 case Node::TYPE_FREE:
12499 case Node::TYPE_ALLOCATION:
12501 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
12507 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
12508 if(unusedRangeSize > 0)
12517 case Node::TYPE_SPLIT:
12519 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
12520 const Node*
const leftChild = node->split.leftChild;
12521 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
12522 const Node*
const rightChild = leftChild->buddy;
12523 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
12531 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
12533 VMA_ASSERT(node->type == Node::TYPE_FREE);
12536 Node*
const frontNode = m_FreeList[level].front;
12537 if(frontNode == VMA_NULL)
12539 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
12540 node->free.prev = node->free.next = VMA_NULL;
12541 m_FreeList[level].front = m_FreeList[level].back = node;
12545 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
12546 node->free.prev = VMA_NULL;
12547 node->free.next = frontNode;
12548 frontNode->free.prev = node;
12549 m_FreeList[level].front = node;
12553 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
12555 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
12558 if(node->free.prev == VMA_NULL)
12560 VMA_ASSERT(m_FreeList[level].front == node);
12561 m_FreeList[level].front = node->free.next;
12565 Node*
const prevFreeNode = node->free.prev;
12566 VMA_ASSERT(prevFreeNode->free.next == node);
12567 prevFreeNode->free.next = node->free.next;
12571 if(node->free.next == VMA_NULL)
12573 VMA_ASSERT(m_FreeList[level].back == node);
12574 m_FreeList[level].back = node->free.prev;
12578 Node*
const nextFreeNode = node->free.next;
12579 VMA_ASSERT(nextFreeNode->free.prev == node);
12580 nextFreeNode->free.prev = node->free.prev;
12584 #if VMA_STATS_STRING_ENABLED
12585 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const
12589 case Node::TYPE_FREE:
12590 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
12592 case Node::TYPE_ALLOCATION:
12594 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
12595 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
12596 if(allocSize < levelNodeSize)
12598 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
12602 case Node::TYPE_SPLIT:
12604 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
12605 const Node*
const leftChild = node->split.leftChild;
12606 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
12607 const Node*
const rightChild = leftChild->buddy;
12608 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
12621 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
12622 m_pMetadata(VMA_NULL),
12623 m_MemoryTypeIndex(UINT32_MAX),
12625 m_hMemory(VK_NULL_HANDLE),
12627 m_pMappedData(VMA_NULL)
12631 void VmaDeviceMemoryBlock::Init(
12634 uint32_t newMemoryTypeIndex,
12635 VkDeviceMemory newMemory,
12636 VkDeviceSize newSize,
12638 uint32_t algorithm)
12640 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
12642 m_hParentPool = hParentPool;
12643 m_MemoryTypeIndex = newMemoryTypeIndex;
12645 m_hMemory = newMemory;
12650 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
12653 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
12659 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
12661 m_pMetadata->Init(newSize);
12664 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
12668 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
12670 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
12671 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
12672 m_hMemory = VK_NULL_HANDLE;
12674 vma_delete(allocator, m_pMetadata);
12675 m_pMetadata = VMA_NULL;
12678 bool VmaDeviceMemoryBlock::Validate()
const
12680 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
12681 (m_pMetadata->GetSize() != 0));
12683 return m_pMetadata->Validate();
12686 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
12688 void* pData =
nullptr;
12689 VkResult res = Map(hAllocator, 1, &pData);
12690 if(res != VK_SUCCESS)
12695 res = m_pMetadata->CheckCorruption(pData);
12697 Unmap(hAllocator, 1);
12702 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
12709 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12710 if(m_MapCount != 0)
12712 m_MapCount += count;
12713 VMA_ASSERT(m_pMappedData != VMA_NULL);
12714 if(ppData != VMA_NULL)
12716 *ppData = m_pMappedData;
12722 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
12723 hAllocator->m_hDevice,
12729 if(result == VK_SUCCESS)
12731 if(ppData != VMA_NULL)
12733 *ppData = m_pMappedData;
12735 m_MapCount = count;
12741 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
12748 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12749 if(m_MapCount >= count)
12751 m_MapCount -= count;
12752 if(m_MapCount == 0)
12754 m_pMappedData = VMA_NULL;
12755 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
12760 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
12764 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12766 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12767 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12770 VkResult res = Map(hAllocator, 1, &pData);
12771 if(res != VK_SUCCESS)
12776 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
12777 VmaWriteMagicValue(pData, allocOffset + allocSize);
12779 Unmap(hAllocator, 1);
12784 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12786 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12787 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12790 VkResult res = Map(hAllocator, 1, &pData);
12791 if(res != VK_SUCCESS)
12796 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
12798 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
12800 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
12802 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
12805 Unmap(hAllocator, 1);
12810 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
12813 VkDeviceSize allocationLocalOffset,
12817 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12818 hAllocation->GetBlock() ==
this);
12819 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12820 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12821 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12823 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12824 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
12827 VkResult VmaDeviceMemoryBlock::BindImageMemory(
12830 VkDeviceSize allocationLocalOffset,
12834 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12835 hAllocation->GetBlock() ==
this);
12836 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12837 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12838 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12840 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12841 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
12846 memset(&outInfo, 0,
sizeof(outInfo));
12865 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
12873 VmaPool_T::VmaPool_T(
12876 VkDeviceSize preferredBlockSize) :
12880 createInfo.memoryTypeIndex,
12881 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
12882 createInfo.minBlockCount,
12883 createInfo.maxBlockCount,
12885 createInfo.frameInUseCount,
12886 createInfo.blockSize != 0,
12888 createInfo.priority,
12889 VMA_MAX(hAllocator->GetMemoryTypeMinAlignment(createInfo.memoryTypeIndex), createInfo.minAllocationAlignment)),
12895 VmaPool_T::~VmaPool_T()
12897 VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL);
12900 void VmaPool_T::SetName(
const char* pName)
12902 const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
12903 VmaFreeString(allocs, m_Name);
12905 if(pName != VMA_NULL)
12907 m_Name = VmaCreateStringCopy(allocs, pName);
12915 #if VMA_STATS_STRING_ENABLED
12919 VmaBlockVector::VmaBlockVector(
12922 uint32_t memoryTypeIndex,
12923 VkDeviceSize preferredBlockSize,
12924 size_t minBlockCount,
12925 size_t maxBlockCount,
12926 VkDeviceSize bufferImageGranularity,
12927 uint32_t frameInUseCount,
12928 bool explicitBlockSize,
12929 uint32_t algorithm,
12931 VkDeviceSize minAllocationAlignment) :
12932 m_hAllocator(hAllocator),
12933 m_hParentPool(hParentPool),
12934 m_MemoryTypeIndex(memoryTypeIndex),
12935 m_PreferredBlockSize(preferredBlockSize),
12936 m_MinBlockCount(minBlockCount),
12937 m_MaxBlockCount(maxBlockCount),
12938 m_BufferImageGranularity(bufferImageGranularity),
12939 m_FrameInUseCount(frameInUseCount),
12940 m_ExplicitBlockSize(explicitBlockSize),
12941 m_Algorithm(algorithm),
12942 m_Priority(priority),
12943 m_MinAllocationAlignment(minAllocationAlignment),
12944 m_HasEmptyBlock(false),
12945 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
12950 VmaBlockVector::~VmaBlockVector()
12952 for(
size_t i = m_Blocks.size(); i--; )
12954 m_Blocks[i]->Destroy(m_hAllocator);
12955 vma_delete(m_hAllocator, m_Blocks[i]);
12959 VkResult VmaBlockVector::CreateMinBlocks()
12961 for(
size_t i = 0; i < m_MinBlockCount; ++i)
12963 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
12964 if(res != VK_SUCCESS)
12972 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
12974 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12976 const size_t blockCount = m_Blocks.size();
12985 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12987 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12988 VMA_ASSERT(pBlock);
12989 VMA_HEAVY_ASSERT(pBlock->Validate());
12990 pBlock->m_pMetadata->AddPoolStats(*pStats);
12994 bool VmaBlockVector::IsEmpty()
12996 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12997 return m_Blocks.empty();
13000 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const
13002 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13003 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
13004 (VMA_DEBUG_MARGIN > 0) &&
13006 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
13009 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
13011 VkResult VmaBlockVector::Allocate(
13012 uint32_t currentFrameIndex,
13014 VkDeviceSize alignment,
13016 VmaSuballocationType suballocType,
13017 size_t allocationCount,
13021 VkResult res = VK_SUCCESS;
13023 alignment = VMA_MAX(alignment, m_MinAllocationAlignment);
13025 if(IsCorruptionDetectionEnabled())
13027 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
13028 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
13032 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13033 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13035 res = AllocatePage(
13041 pAllocations + allocIndex);
13042 if(res != VK_SUCCESS)
13049 if(res != VK_SUCCESS)
13052 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13053 while(allocIndex--)
13055 VmaAllocation_T*
const alloc = pAllocations[allocIndex];
13056 const VkDeviceSize allocSize = alloc->GetSize();
13058 m_hAllocator->m_Budget.RemoveAllocation(heapIndex, allocSize);
13060 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
13066 VkResult VmaBlockVector::AllocatePage(
13067 uint32_t currentFrameIndex,
13069 VkDeviceSize alignment,
13071 VmaSuballocationType suballocType,
13079 VkDeviceSize freeMemory;
13081 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13083 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
13087 const bool canFallbackToDedicated = !IsCustomPool();
13088 const bool canCreateNewBlock =
13090 (m_Blocks.size() < m_MaxBlockCount) &&
13091 (freeMemory >= size || !canFallbackToDedicated);
13098 canMakeOtherLost =
false;
13102 if(isUpperAddress &&
13105 return VK_ERROR_FEATURE_NOT_PRESENT;
13119 return VK_ERROR_FEATURE_NOT_PRESENT;
13123 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
13125 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13133 if(!canMakeOtherLost || canCreateNewBlock)
13142 if(!m_Blocks.empty())
13144 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
13145 VMA_ASSERT(pCurrBlock);
13146 VkResult res = AllocateFromBlock(
13156 if(res == VK_SUCCESS)
13158 VMA_DEBUG_LOG(
" Returned from last block #%u", pCurrBlock->GetId());
13168 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13170 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13171 VMA_ASSERT(pCurrBlock);
13172 VkResult res = AllocateFromBlock(
13182 if(res == VK_SUCCESS)
13184 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
13192 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13194 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13195 VMA_ASSERT(pCurrBlock);
13196 VkResult res = AllocateFromBlock(
13206 if(res == VK_SUCCESS)
13208 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
13216 if(canCreateNewBlock)
13219 VkDeviceSize newBlockSize = m_PreferredBlockSize;
13220 uint32_t newBlockSizeShift = 0;
13221 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
13223 if(!m_ExplicitBlockSize)
13226 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
13227 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
13229 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
13230 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
13232 newBlockSize = smallerNewBlockSize;
13233 ++newBlockSizeShift;
13242 size_t newBlockIndex = 0;
13243 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
13244 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
13246 if(!m_ExplicitBlockSize)
13248 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
13250 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
13251 if(smallerNewBlockSize >= size)
13253 newBlockSize = smallerNewBlockSize;
13254 ++newBlockSizeShift;
13255 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
13256 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
13265 if(res == VK_SUCCESS)
13267 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
13268 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
13270 res = AllocateFromBlock(
13280 if(res == VK_SUCCESS)
13282 VMA_DEBUG_LOG(
" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
13288 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13295 if(canMakeOtherLost)
13297 uint32_t tryIndex = 0;
13298 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
13300 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
13301 VmaAllocationRequest bestRequest = {};
13302 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
13308 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13310 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13311 VMA_ASSERT(pCurrBlock);
13312 VmaAllocationRequest currRequest = {};
13313 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13316 m_BufferImageGranularity,
13325 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13326 if(pBestRequestBlock == VMA_NULL ||
13327 currRequestCost < bestRequestCost)
13329 pBestRequestBlock = pCurrBlock;
13330 bestRequest = currRequest;
13331 bestRequestCost = currRequestCost;
13333 if(bestRequestCost == 0)
13344 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13346 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13347 VMA_ASSERT(pCurrBlock);
13348 VmaAllocationRequest currRequest = {};
13349 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13352 m_BufferImageGranularity,
13361 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13362 if(pBestRequestBlock == VMA_NULL ||
13363 currRequestCost < bestRequestCost ||
13366 pBestRequestBlock = pCurrBlock;
13367 bestRequest = currRequest;
13368 bestRequestCost = currRequestCost;
13370 if(bestRequestCost == 0 ||
13380 if(pBestRequestBlock != VMA_NULL)
13384 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
13385 if(res != VK_SUCCESS)
13391 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
13397 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13398 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
13399 UpdateHasEmptyBlock();
13400 (*pAllocation)->InitBlockAllocation(
13402 bestRequest.offset,
13409 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
13410 VMA_DEBUG_LOG(
" Returned from existing block");
13411 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
13412 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13413 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13415 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
13417 if(IsCorruptionDetectionEnabled())
13419 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
13420 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
13435 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
13437 return VK_ERROR_TOO_MANY_OBJECTS;
13441 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13444 void VmaBlockVector::Free(
13447 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
13449 bool budgetExceeded =
false;
13451 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13453 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
13454 budgetExceeded = heapBudget.
usage >= heapBudget.
budget;
13459 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13461 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13463 if(IsCorruptionDetectionEnabled())
13465 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
13466 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
13469 if(hAllocation->IsPersistentMap())
13471 pBlock->Unmap(m_hAllocator, 1);
13474 pBlock->m_pMetadata->Free(hAllocation);
13475 VMA_HEAVY_ASSERT(pBlock->Validate());
13477 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
13479 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
13481 if(pBlock->m_pMetadata->IsEmpty())
13484 if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
13486 pBlockToDelete = pBlock;
13493 else if(m_HasEmptyBlock && canDeleteBlock)
13495 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
13496 if(pLastBlock->m_pMetadata->IsEmpty())
13498 pBlockToDelete = pLastBlock;
13499 m_Blocks.pop_back();
13503 UpdateHasEmptyBlock();
13504 IncrementallySortBlocks();
13509 if(pBlockToDelete != VMA_NULL)
13511 VMA_DEBUG_LOG(
" Deleted empty block");
13512 pBlockToDelete->Destroy(m_hAllocator);
13513 vma_delete(m_hAllocator, pBlockToDelete);
13517 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const
13519 VkDeviceSize result = 0;
13520 for(
size_t i = m_Blocks.size(); i--; )
13522 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
13523 if(result >= m_PreferredBlockSize)
13531 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
13533 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13535 if(m_Blocks[blockIndex] == pBlock)
13537 VmaVectorRemove(m_Blocks, blockIndex);
13544 void VmaBlockVector::IncrementallySortBlocks()
13549 for(
size_t i = 1; i < m_Blocks.size(); ++i)
13551 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
13553 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
13560 VkResult VmaBlockVector::AllocateFromBlock(
13561 VmaDeviceMemoryBlock* pBlock,
13562 uint32_t currentFrameIndex,
13564 VkDeviceSize alignment,
13567 VmaSuballocationType suballocType,
13576 VmaAllocationRequest currRequest = {};
13577 if(pBlock->m_pMetadata->CreateAllocationRequest(
13580 m_BufferImageGranularity,
13590 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
13594 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
13595 if(res != VK_SUCCESS)
13601 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13602 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
13603 UpdateHasEmptyBlock();
13604 (*pAllocation)->InitBlockAllocation(
13606 currRequest.offset,
13613 VMA_HEAVY_ASSERT(pBlock->Validate());
13614 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
13615 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13616 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13618 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
13620 if(IsCorruptionDetectionEnabled())
13622 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
13623 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
13627 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13630 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
13632 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
13633 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
13634 allocInfo.allocationSize = blockSize;
13636 #if VMA_BUFFER_DEVICE_ADDRESS
13638 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
13639 if(m_hAllocator->m_UseKhrBufferDeviceAddress)
13641 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
13642 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
13646 #if VMA_MEMORY_PRIORITY
13647 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
13648 if(m_hAllocator->m_UseExtMemoryPriority)
13650 priorityInfo.priority = m_Priority;
13651 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
13655 VkDeviceMemory mem = VK_NULL_HANDLE;
13656 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
13665 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
13671 allocInfo.allocationSize,
13675 m_Blocks.push_back(pBlock);
13676 if(pNewBlockIndex != VMA_NULL)
13678 *pNewBlockIndex = m_Blocks.size() - 1;
13684 void VmaBlockVector::ApplyDefragmentationMovesCpu(
13685 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13686 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
13688 const size_t blockCount = m_Blocks.size();
13689 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
13693 BLOCK_FLAG_USED = 0x00000001,
13694 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
13702 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
13703 blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
13704 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
13707 const size_t moveCount = moves.size();
13708 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13710 const VmaDefragmentationMove& move = moves[moveIndex];
13711 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
13712 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
13715 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
13718 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13720 BlockInfo& currBlockInfo = blockInfo[blockIndex];
13721 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13722 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
13724 currBlockInfo.pMappedData = pBlock->GetMappedData();
13726 if(currBlockInfo.pMappedData == VMA_NULL)
13728 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
13729 if(pDefragCtx->res == VK_SUCCESS)
13731 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
13738 if(pDefragCtx->res == VK_SUCCESS)
13740 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13741 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13743 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13745 const VmaDefragmentationMove& move = moves[moveIndex];
13747 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
13748 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
13750 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
13755 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
13756 memRange.memory = pSrcBlock->GetDeviceMemory();
13757 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
13758 memRange.size = VMA_MIN(
13759 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
13760 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
13761 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
13766 reinterpret_cast<char*
>(dstBlockInfo.pMappedData) + move.dstOffset,
13767 reinterpret_cast<char*
>(srcBlockInfo.pMappedData) + move.srcOffset,
13768 static_cast<size_t>(move.size));
13770 if(IsCorruptionDetectionEnabled())
13772 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
13773 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
13779 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
13780 memRange.memory = pDstBlock->GetDeviceMemory();
13781 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
13782 memRange.size = VMA_MIN(
13783 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
13784 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
13785 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
13792 for(
size_t blockIndex = blockCount; blockIndex--; )
13794 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
13795 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
13797 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13798 pBlock->Unmap(m_hAllocator, 1);
13803 void VmaBlockVector::ApplyDefragmentationMovesGpu(
13804 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13805 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13806 VkCommandBuffer commandBuffer)
13808 const size_t blockCount = m_Blocks.size();
13810 pDefragCtx->blockContexts.resize(blockCount);
13811 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
13814 const size_t moveCount = moves.size();
13815 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13817 const VmaDefragmentationMove& move = moves[moveIndex];
13822 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13823 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13827 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
13831 VkBufferCreateInfo bufCreateInfo;
13832 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
13834 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13836 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
13837 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13838 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
13840 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
13841 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
13842 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
13843 if(pDefragCtx->res == VK_SUCCESS)
13845 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
13846 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
13853 if(pDefragCtx->res == VK_SUCCESS)
13855 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13857 const VmaDefragmentationMove& move = moves[moveIndex];
13859 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
13860 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
13862 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
13864 VkBufferCopy region = {
13868 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
13869 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
13874 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
13876 pDefragCtx->res = VK_NOT_READY;
13882 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13884 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13885 if(pBlock->m_pMetadata->IsEmpty())
13887 if(m_Blocks.size() > m_MinBlockCount)
13889 if(pDefragmentationStats != VMA_NULL)
13892 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
13895 VmaVectorRemove(m_Blocks, blockIndex);
13896 pBlock->Destroy(m_hAllocator);
13897 vma_delete(m_hAllocator, pBlock);
13905 UpdateHasEmptyBlock();
13908 void VmaBlockVector::UpdateHasEmptyBlock()
13910 m_HasEmptyBlock =
false;
13911 for(
size_t index = 0, count = m_Blocks.size(); index < count; ++index)
13913 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[index];
13914 if(pBlock->m_pMetadata->IsEmpty())
13916 m_HasEmptyBlock =
true;
13922 #if VMA_STATS_STRING_ENABLED
13924 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
13926 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13928 json.BeginObject();
13932 const char* poolName = m_hParentPool->GetName();
13933 if(poolName != VMA_NULL && poolName[0] !=
'\0')
13935 json.WriteString(
"Name");
13936 json.WriteString(poolName);
13939 json.WriteString(
"MemoryTypeIndex");
13940 json.WriteNumber(m_MemoryTypeIndex);
13942 json.WriteString(
"BlockSize");
13943 json.WriteNumber(m_PreferredBlockSize);
13945 json.WriteString(
"BlockCount");
13946 json.BeginObject(
true);
13947 if(m_MinBlockCount > 0)
13949 json.WriteString(
"Min");
13950 json.WriteNumber((uint64_t)m_MinBlockCount);
13952 if(m_MaxBlockCount < SIZE_MAX)
13954 json.WriteString(
"Max");
13955 json.WriteNumber((uint64_t)m_MaxBlockCount);
13957 json.WriteString(
"Cur");
13958 json.WriteNumber((uint64_t)m_Blocks.size());
13961 if(m_FrameInUseCount > 0)
13963 json.WriteString(
"FrameInUseCount");
13964 json.WriteNumber(m_FrameInUseCount);
13967 if(m_Algorithm != 0)
13969 json.WriteString(
"Algorithm");
13970 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
13975 json.WriteString(
"PreferredBlockSize");
13976 json.WriteNumber(m_PreferredBlockSize);
13979 json.WriteString(
"Blocks");
13980 json.BeginObject();
13981 for(
size_t i = 0; i < m_Blocks.size(); ++i)
13983 json.BeginString();
13984 json.ContinueString(m_Blocks[i]->GetId());
13987 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
13996 void VmaBlockVector::Defragment(
13997 class VmaBlockVectorDefragmentationContext* pCtx,
13999 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
14000 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
14001 VkCommandBuffer commandBuffer)
14003 pCtx->res = VK_SUCCESS;
14005 const VkMemoryPropertyFlags memPropFlags =
14006 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
14007 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
14009 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
14011 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
14012 !IsCorruptionDetectionEnabled() &&
14013 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
14016 if(canDefragmentOnCpu || canDefragmentOnGpu)
14018 bool defragmentOnGpu;
14020 if(canDefragmentOnGpu != canDefragmentOnCpu)
14022 defragmentOnGpu = canDefragmentOnGpu;
14027 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
14028 m_hAllocator->IsIntegratedGpu();
14031 bool overlappingMoveSupported = !defragmentOnGpu;
14033 if(m_hAllocator->m_UseMutex)
14037 if(!m_Mutex.TryLockWrite())
14039 pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
14045 m_Mutex.LockWrite();
14046 pCtx->mutexLocked =
true;
14050 pCtx->Begin(overlappingMoveSupported, flags);
14054 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
14055 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
14056 pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);
14059 if(pStats != VMA_NULL)
14061 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
14062 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
14065 VMA_ASSERT(bytesMoved <= maxBytesToMove);
14066 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
14067 if(defragmentOnGpu)
14069 maxGpuBytesToMove -= bytesMoved;
14070 maxGpuAllocationsToMove -= allocationsMoved;
14074 maxCpuBytesToMove -= bytesMoved;
14075 maxCpuAllocationsToMove -= allocationsMoved;
14081 if(m_hAllocator->m_UseMutex)
14082 m_Mutex.UnlockWrite();
14084 if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
14085 pCtx->res = VK_NOT_READY;
14090 if(pCtx->res >= VK_SUCCESS)
14092 if(defragmentOnGpu)
14094 ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
14098 ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
14104 void VmaBlockVector::DefragmentationEnd(
14105 class VmaBlockVectorDefragmentationContext* pCtx,
14111 VMA_ASSERT(pCtx->mutexLocked ==
false);
14115 m_Mutex.LockWrite();
14116 pCtx->mutexLocked =
true;
14120 if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
14123 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
14125 VmaBlockDefragmentationContext &blockCtx = pCtx->blockContexts[blockIndex];
14126 if(blockCtx.hBuffer)
14128 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
14132 if(pCtx->res >= VK_SUCCESS)
14134 FreeEmptyBlocks(pStats);
14138 if(pCtx->mutexLocked)
14140 VMA_ASSERT(m_hAllocator->m_UseMutex);
14141 m_Mutex.UnlockWrite();
14145 uint32_t VmaBlockVector::ProcessDefragmentations(
14146 class VmaBlockVectorDefragmentationContext *pCtx,
14149 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14151 const uint32_t moveCount = VMA_MIN(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);
14153 for(uint32_t i = 0; i < moveCount; ++ i)
14155 VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];
14158 pMove->
memory = move.pDstBlock->GetDeviceMemory();
14159 pMove->
offset = move.dstOffset;
14164 pCtx->defragmentationMovesProcessed += moveCount;
14169 void VmaBlockVector::CommitDefragmentations(
14170 class VmaBlockVectorDefragmentationContext *pCtx,
14173 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14175 for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
14177 const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];
14179 move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
14180 move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
14183 pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
14184 FreeEmptyBlocks(pStats);
14187 size_t VmaBlockVector::CalcAllocationCount()
const
14190 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14192 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
14197 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const
14199 if(m_BufferImageGranularity == 1)
14203 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
14204 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
14206 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
14207 VMA_ASSERT(m_Algorithm == 0);
14208 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
14209 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
14217 void VmaBlockVector::MakePoolAllocationsLost(
14218 uint32_t currentFrameIndex,
14219 size_t* pLostAllocationCount)
14221 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14222 size_t lostAllocationCount = 0;
14223 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14225 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14226 VMA_ASSERT(pBlock);
14227 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
14229 if(pLostAllocationCount != VMA_NULL)
14231 *pLostAllocationCount = lostAllocationCount;
14235 VkResult VmaBlockVector::CheckCorruption()
14237 if(!IsCorruptionDetectionEnabled())
14239 return VK_ERROR_FEATURE_NOT_PRESENT;
14242 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
14243 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14245 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14246 VMA_ASSERT(pBlock);
14247 VkResult res = pBlock->CheckCorruption(m_hAllocator);
14248 if(res != VK_SUCCESS)
14256 void VmaBlockVector::AddStats(
VmaStats* pStats)
14258 const uint32_t memTypeIndex = m_MemoryTypeIndex;
14259 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
14261 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
14263 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14265 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14266 VMA_ASSERT(pBlock);
14267 VMA_HEAVY_ASSERT(pBlock->Validate());
14269 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
14270 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14271 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14272 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
14279 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
14281 VmaBlockVector* pBlockVector,
14282 uint32_t currentFrameIndex,
14283 bool overlappingMoveSupported) :
14284 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14285 m_AllocationCount(0),
14286 m_AllAllocations(false),
14288 m_AllocationsMoved(0),
14289 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
14292 const size_t blockCount = m_pBlockVector->m_Blocks.size();
14293 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14295 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
14296 pBlockInfo->m_OriginalBlockIndex = blockIndex;
14297 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
14298 m_Blocks.push_back(pBlockInfo);
14302 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
14305 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
14307 for(
size_t i = m_Blocks.size(); i--; )
14309 vma_delete(m_hAllocator, m_Blocks[i]);
14313 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
14316 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
14318 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
14319 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
14320 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
14322 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
14323 (*it)->m_Allocations.push_back(allocInfo);
14330 ++m_AllocationCount;
14334 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
14335 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14336 VkDeviceSize maxBytesToMove,
14337 uint32_t maxAllocationsToMove,
14338 bool freeOldAllocations)
14340 if(m_Blocks.empty())
14353 size_t srcBlockMinIndex = 0;
14366 size_t srcBlockIndex = m_Blocks.size() - 1;
14367 size_t srcAllocIndex = SIZE_MAX;
14373 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
14375 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
14378 if(srcBlockIndex == srcBlockMinIndex)
14385 srcAllocIndex = SIZE_MAX;
14390 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
14394 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
14395 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
14397 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
14398 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
14399 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
14400 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
14403 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
14405 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
14406 VmaAllocationRequest dstAllocRequest;
14407 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
14408 m_CurrentFrameIndex,
14409 m_pBlockVector->GetFrameInUseCount(),
14410 m_pBlockVector->GetBufferImageGranularity(),
14417 &dstAllocRequest) &&
14419 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
14421 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
14424 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
14425 (m_BytesMoved + size > maxBytesToMove))
14430 VmaDefragmentationMove move = {};
14431 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
14432 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
14433 move.srcOffset = srcOffset;
14434 move.dstOffset = dstAllocRequest.offset;
14436 move.hAllocation = allocInfo.m_hAllocation;
14437 move.pSrcBlock = pSrcBlockInfo->m_pBlock;
14438 move.pDstBlock = pDstBlockInfo->m_pBlock;
14440 moves.push_back(move);
14442 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
14446 allocInfo.m_hAllocation);
14448 if(freeOldAllocations)
14450 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
14451 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
14454 if(allocInfo.m_pChanged != VMA_NULL)
14456 *allocInfo.m_pChanged = VK_TRUE;
14459 ++m_AllocationsMoved;
14460 m_BytesMoved += size;
14462 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
14470 if(srcAllocIndex > 0)
14476 if(srcBlockIndex > 0)
14479 srcAllocIndex = SIZE_MAX;
14489 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const
14492 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14494 if(m_Blocks[i]->m_HasNonMovableAllocations)
14502 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
14503 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14504 VkDeviceSize maxBytesToMove,
14505 uint32_t maxAllocationsToMove,
14508 if(!m_AllAllocations && m_AllocationCount == 0)
14513 const size_t blockCount = m_Blocks.size();
14514 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14516 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
14518 if(m_AllAllocations)
14520 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
14521 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
14522 it != pMetadata->m_Suballocations.end();
14525 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
14527 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
14528 pBlockInfo->m_Allocations.push_back(allocInfo);
14533 pBlockInfo->CalcHasNonMovableAllocations();
14537 pBlockInfo->SortAllocationsByOffsetDescending();
14543 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
14546 const uint32_t roundCount = 2;
14549 VkResult result = VK_SUCCESS;
14550 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
14558 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
14559 size_t dstBlockIndex, VkDeviceSize dstOffset,
14560 size_t srcBlockIndex, VkDeviceSize srcOffset)
14562 if(dstBlockIndex < srcBlockIndex)
14566 if(dstBlockIndex > srcBlockIndex)
14570 if(dstOffset < srcOffset)
14580 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
14582 VmaBlockVector* pBlockVector,
14583 uint32_t currentFrameIndex,
14584 bool overlappingMoveSupported) :
14585 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14586 m_OverlappingMoveSupported(overlappingMoveSupported),
14587 m_AllocationCount(0),
14588 m_AllAllocations(false),
14590 m_AllocationsMoved(0),
14591 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
14593 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
14597 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
14601 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
14602 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14603 VkDeviceSize maxBytesToMove,
14604 uint32_t maxAllocationsToMove,
14607 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
14609 const size_t blockCount = m_pBlockVector->GetBlockCount();
14610 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
14615 PreprocessMetadata();
14619 m_BlockInfos.resize(blockCount);
14620 for(
size_t i = 0; i < blockCount; ++i)
14622 m_BlockInfos[i].origBlockIndex = i;
14625 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
14626 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
14627 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
14632 FreeSpaceDatabase freeSpaceDb;
14634 size_t dstBlockInfoIndex = 0;
14635 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14636 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14637 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14638 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
14639 VkDeviceSize dstOffset = 0;
14642 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
14644 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
14645 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
14646 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
14647 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
14648 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
14650 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
14651 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
14652 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
14653 if(m_AllocationsMoved == maxAllocationsToMove ||
14654 m_BytesMoved + srcAllocSize > maxBytesToMove)
14659 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
14661 VmaDefragmentationMove move = {};
14663 size_t freeSpaceInfoIndex;
14664 VkDeviceSize dstAllocOffset;
14665 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
14666 freeSpaceInfoIndex, dstAllocOffset))
14668 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
14669 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
14670 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
14673 if(freeSpaceInfoIndex == srcBlockInfoIndex)
14675 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14679 VmaSuballocation suballoc = *srcSuballocIt;
14680 suballoc.offset = dstAllocOffset;
14681 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
14682 m_BytesMoved += srcAllocSize;
14683 ++m_AllocationsMoved;
14685 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14687 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14688 srcSuballocIt = nextSuballocIt;
14690 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14692 move.srcBlockIndex = srcOrigBlockIndex;
14693 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14694 move.srcOffset = srcAllocOffset;
14695 move.dstOffset = dstAllocOffset;
14696 move.size = srcAllocSize;
14698 moves.push_back(move);
14705 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
14707 VmaSuballocation suballoc = *srcSuballocIt;
14708 suballoc.offset = dstAllocOffset;
14709 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
14710 m_BytesMoved += srcAllocSize;
14711 ++m_AllocationsMoved;
14713 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14715 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14716 srcSuballocIt = nextSuballocIt;
14718 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14720 move.srcBlockIndex = srcOrigBlockIndex;
14721 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14722 move.srcOffset = srcAllocOffset;
14723 move.dstOffset = dstAllocOffset;
14724 move.size = srcAllocSize;
14726 moves.push_back(move);
14731 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
14734 while(dstBlockInfoIndex < srcBlockInfoIndex &&
14735 dstAllocOffset + srcAllocSize > dstBlockSize)
14738 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
14740 ++dstBlockInfoIndex;
14741 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14742 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14743 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14744 dstBlockSize = pDstMetadata->GetSize();
14746 dstAllocOffset = 0;
14750 if(dstBlockInfoIndex == srcBlockInfoIndex)
14752 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14754 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
14756 bool skipOver = overlap;
14757 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
14761 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
14766 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
14768 dstOffset = srcAllocOffset + srcAllocSize;
14774 srcSuballocIt->offset = dstAllocOffset;
14775 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
14776 dstOffset = dstAllocOffset + srcAllocSize;
14777 m_BytesMoved += srcAllocSize;
14778 ++m_AllocationsMoved;
14781 move.srcBlockIndex = srcOrigBlockIndex;
14782 move.dstBlockIndex = dstOrigBlockIndex;
14783 move.srcOffset = srcAllocOffset;
14784 move.dstOffset = dstAllocOffset;
14785 move.size = srcAllocSize;
14787 moves.push_back(move);
14795 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
14796 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
14798 VmaSuballocation suballoc = *srcSuballocIt;
14799 suballoc.offset = dstAllocOffset;
14800 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
14801 dstOffset = dstAllocOffset + srcAllocSize;
14802 m_BytesMoved += srcAllocSize;
14803 ++m_AllocationsMoved;
14805 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14807 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14808 srcSuballocIt = nextSuballocIt;
14810 pDstMetadata->m_Suballocations.push_back(suballoc);
14812 move.srcBlockIndex = srcOrigBlockIndex;
14813 move.dstBlockIndex = dstOrigBlockIndex;
14814 move.srcOffset = srcAllocOffset;
14815 move.dstOffset = dstAllocOffset;
14816 move.size = srcAllocSize;
14818 moves.push_back(move);
14824 m_BlockInfos.clear();
14826 PostprocessMetadata();
14831 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
14833 const size_t blockCount = m_pBlockVector->GetBlockCount();
14834 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14836 VmaBlockMetadata_Generic*
const pMetadata =
14837 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14838 pMetadata->m_FreeCount = 0;
14839 pMetadata->m_SumFreeSize = pMetadata->GetSize();
14840 pMetadata->m_FreeSuballocationsBySize.clear();
14841 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14842 it != pMetadata->m_Suballocations.end(); )
14844 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
14846 VmaSuballocationList::iterator nextIt = it;
14848 pMetadata->m_Suballocations.erase(it);
14859 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
14861 const size_t blockCount = m_pBlockVector->GetBlockCount();
14862 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14864 VmaBlockMetadata_Generic*
const pMetadata =
14865 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14866 const VkDeviceSize blockSize = pMetadata->GetSize();
14869 if(pMetadata->m_Suballocations.empty())
14871 pMetadata->m_FreeCount = 1;
14873 VmaSuballocation suballoc = {
14877 VMA_SUBALLOCATION_TYPE_FREE };
14878 pMetadata->m_Suballocations.push_back(suballoc);
14879 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
14884 VkDeviceSize offset = 0;
14885 VmaSuballocationList::iterator it;
14886 for(it = pMetadata->m_Suballocations.begin();
14887 it != pMetadata->m_Suballocations.end();
14890 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
14891 VMA_ASSERT(it->offset >= offset);
14894 if(it->offset > offset)
14896 ++pMetadata->m_FreeCount;
14897 const VkDeviceSize freeSize = it->offset - offset;
14898 VmaSuballocation suballoc = {
14902 VMA_SUBALLOCATION_TYPE_FREE };
14903 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
14904 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14906 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
14910 pMetadata->m_SumFreeSize -= it->size;
14911 offset = it->offset + it->size;
14915 if(offset < blockSize)
14917 ++pMetadata->m_FreeCount;
14918 const VkDeviceSize freeSize = blockSize - offset;
14919 VmaSuballocation suballoc = {
14923 VMA_SUBALLOCATION_TYPE_FREE };
14924 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
14925 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
14926 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14928 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
14933 pMetadata->m_FreeSuballocationsBySize.begin(),
14934 pMetadata->m_FreeSuballocationsBySize.end(),
14935 VmaSuballocationItemSizeLess());
14938 VMA_HEAVY_ASSERT(pMetadata->Validate());
14942 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
14945 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14946 while(it != pMetadata->m_Suballocations.end())
14948 if(it->offset < suballoc.offset)
14953 pMetadata->m_Suballocations.insert(it, suballoc);
14959 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
14962 VmaBlockVector* pBlockVector,
14963 uint32_t currFrameIndex) :
14965 mutexLocked(false),
14966 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
14967 defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
14968 defragmentationMovesProcessed(0),
14969 defragmentationMovesCommitted(0),
14970 hasDefragmentationPlan(0),
14971 m_hAllocator(hAllocator),
14972 m_hCustomPool(hCustomPool),
14973 m_pBlockVector(pBlockVector),
14974 m_CurrFrameIndex(currFrameIndex),
14975 m_pAlgorithm(VMA_NULL),
14976 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
14977 m_AllAllocations(false)
14981 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
14983 vma_delete(m_hAllocator, m_pAlgorithm);
14986 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
14988 AllocInfo info = { hAlloc, pChanged };
14989 m_Allocations.push_back(info);
14992 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported,
VmaDefragmentationFlags flags)
14994 const bool allAllocations = m_AllAllocations ||
14995 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
15008 if(VMA_DEBUG_MARGIN == 0 &&
15010 !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
15013 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
15014 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
15018 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
15019 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
15024 m_pAlgorithm->AddAll();
15028 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
15030 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
15038 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
15040 uint32_t currFrameIndex,
15043 m_hAllocator(hAllocator),
15044 m_CurrFrameIndex(currFrameIndex),
15047 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
15049 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
15052 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
15054 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15056 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
15057 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
15058 vma_delete(m_hAllocator, pBlockVectorCtx);
15060 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
15062 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
15063 if(pBlockVectorCtx)
15065 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
15066 vma_delete(m_hAllocator, pBlockVectorCtx);
15071 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
const VmaPool* pPools)
15073 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15075 VmaPool pool = pPools[poolIndex];
15078 if(pool->m_BlockVector.GetAlgorithm() == 0)
15080 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
15082 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15084 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
15086 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
15091 if(!pBlockVectorDefragCtx)
15093 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15096 &pool->m_BlockVector,
15098 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
15101 pBlockVectorDefragCtx->AddAll();
15106 void VmaDefragmentationContext_T::AddAllocations(
15107 uint32_t allocationCount,
15109 VkBool32* pAllocationsChanged)
15112 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
15115 VMA_ASSERT(hAlloc);
15117 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
15119 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
15121 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
15123 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
15125 if(hAllocPool != VK_NULL_HANDLE)
15128 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
15130 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15132 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
15134 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
15138 if(!pBlockVectorDefragCtx)
15140 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15143 &hAllocPool->m_BlockVector,
15145 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
15152 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
15153 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
15154 if(!pBlockVectorDefragCtx)
15156 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15159 m_hAllocator->m_pBlockVectors[memTypeIndex],
15161 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
15165 if(pBlockVectorDefragCtx)
15167 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
15168 &pAllocationsChanged[allocIndex] : VMA_NULL;
15169 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
15175 VkResult VmaDefragmentationContext_T::Defragment(
15176 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
15177 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
15189 m_MaxCpuBytesToMove = maxCpuBytesToMove;
15190 m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
15192 m_MaxGpuBytesToMove = maxGpuBytesToMove;
15193 m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;
15195 if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
15196 m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
15199 return VK_NOT_READY;
15202 if(commandBuffer == VK_NULL_HANDLE)
15204 maxGpuBytesToMove = 0;
15205 maxGpuAllocationsToMove = 0;
15208 VkResult res = VK_SUCCESS;
15211 for(uint32_t memTypeIndex = 0;
15212 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
15215 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15216 if(pBlockVectorCtx)
15218 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15219 pBlockVectorCtx->GetBlockVector()->Defragment(
15222 maxCpuBytesToMove, maxCpuAllocationsToMove,
15223 maxGpuBytesToMove, maxGpuAllocationsToMove,
15225 if(pBlockVectorCtx->res != VK_SUCCESS)
15227 res = pBlockVectorCtx->res;
15233 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15234 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
15237 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15238 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15239 pBlockVectorCtx->GetBlockVector()->Defragment(
15242 maxCpuBytesToMove, maxCpuAllocationsToMove,
15243 maxGpuBytesToMove, maxGpuAllocationsToMove,
15245 if(pBlockVectorCtx->res != VK_SUCCESS)
15247 res = pBlockVectorCtx->res;
15260 for(uint32_t memTypeIndex = 0;
15261 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15264 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15265 if(pBlockVectorCtx)
15267 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15269 if(!pBlockVectorCtx->hasDefragmentationPlan)
15271 pBlockVectorCtx->GetBlockVector()->Defragment(
15274 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15275 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
15278 if(pBlockVectorCtx->res < VK_SUCCESS)
15281 pBlockVectorCtx->hasDefragmentationPlan =
true;
15284 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15286 pCurrentMove, movesLeft);
15288 movesLeft -= processed;
15289 pCurrentMove += processed;
15294 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15295 customCtxIndex < customCtxCount;
15298 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15299 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15301 if(!pBlockVectorCtx->hasDefragmentationPlan)
15303 pBlockVectorCtx->GetBlockVector()->Defragment(
15306 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15307 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
15310 if(pBlockVectorCtx->res < VK_SUCCESS)
15313 pBlockVectorCtx->hasDefragmentationPlan =
true;
15316 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15318 pCurrentMove, movesLeft);
15320 movesLeft -= processed;
15321 pCurrentMove += processed;
15328 VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
15330 VkResult res = VK_SUCCESS;
15333 for(uint32_t memTypeIndex = 0;
15334 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15337 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15338 if(pBlockVectorCtx)
15340 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15342 if(!pBlockVectorCtx->hasDefragmentationPlan)
15344 res = VK_NOT_READY;
15348 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15349 pBlockVectorCtx, m_pStats);
15351 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15352 res = VK_NOT_READY;
15357 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15358 customCtxIndex < customCtxCount;
15361 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15362 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15364 if(!pBlockVectorCtx->hasDefragmentationPlan)
15366 res = VK_NOT_READY;
15370 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15371 pBlockVectorCtx, m_pStats);
15373 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15374 res = VK_NOT_READY;
15383 #if VMA_RECORDING_ENABLED
15385 VmaRecorder::VmaRecorder() :
15389 m_RecordingStartTime(std::chrono::high_resolution_clock::now())
15395 m_UseMutex = useMutex;
15396 m_Flags = settings.
flags;
15398 #if defined(_WIN32)
15400 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
15404 return VK_ERROR_INITIALIZATION_FAILED;
15408 m_File = fopen(settings.
pFilePath,
"wb");
15412 return VK_ERROR_INITIALIZATION_FAILED;
15417 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
15418 fprintf(m_File,
"%s\n",
"1,8");
15423 VmaRecorder::~VmaRecorder()
15425 if(m_File != VMA_NULL)
15431 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
15433 CallParams callParams;
15434 GetBasicParams(callParams);
15436 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15437 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
15441 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
15443 CallParams callParams;
15444 GetBasicParams(callParams);
15446 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15447 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
15453 CallParams callParams;
15454 GetBasicParams(callParams);
15456 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15457 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
15468 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
15470 CallParams callParams;
15471 GetBasicParams(callParams);
15473 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15474 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
15479 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
15480 const VkMemoryRequirements& vkMemReq,
15484 CallParams callParams;
15485 GetBasicParams(callParams);
15487 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15488 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15489 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15491 vkMemReq.alignment,
15492 vkMemReq.memoryTypeBits,
15500 userDataStr.GetString());
15504 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
15505 const VkMemoryRequirements& vkMemReq,
15507 uint64_t allocationCount,
15510 CallParams callParams;
15511 GetBasicParams(callParams);
15513 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15514 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15515 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
15517 vkMemReq.alignment,
15518 vkMemReq.memoryTypeBits,
15525 PrintPointerList(allocationCount, pAllocations);
15526 fprintf(m_File,
",%s\n", userDataStr.GetString());
15530 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
15531 const VkMemoryRequirements& vkMemReq,
15532 bool requiresDedicatedAllocation,
15533 bool prefersDedicatedAllocation,
15537 CallParams callParams;
15538 GetBasicParams(callParams);
15540 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15541 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15542 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15544 vkMemReq.alignment,
15545 vkMemReq.memoryTypeBits,
15546 requiresDedicatedAllocation ? 1 : 0,
15547 prefersDedicatedAllocation ? 1 : 0,
15555 userDataStr.GetString());
15559 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
15560 const VkMemoryRequirements& vkMemReq,
15561 bool requiresDedicatedAllocation,
15562 bool prefersDedicatedAllocation,
15566 CallParams callParams;
15567 GetBasicParams(callParams);
15569 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15570 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15571 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15573 vkMemReq.alignment,
15574 vkMemReq.memoryTypeBits,
15575 requiresDedicatedAllocation ? 1 : 0,
15576 prefersDedicatedAllocation ? 1 : 0,
15584 userDataStr.GetString());
15588 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
15591 CallParams callParams;
15592 GetBasicParams(callParams);
15594 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15595 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
15600 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
15601 uint64_t allocationCount,
15604 CallParams callParams;
15605 GetBasicParams(callParams);
15607 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15608 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
15609 PrintPointerList(allocationCount, pAllocations);
15610 fprintf(m_File,
"\n");
15614 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
15616 const void* pUserData)
15618 CallParams callParams;
15619 GetBasicParams(callParams);
15621 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15622 UserDataString userDataStr(
15625 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15627 userDataStr.GetString());
15631 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
15634 CallParams callParams;
15635 GetBasicParams(callParams);
15637 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15638 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
15643 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
15646 CallParams callParams;
15647 GetBasicParams(callParams);
15649 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15650 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
15655 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
15658 CallParams callParams;
15659 GetBasicParams(callParams);
15661 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15662 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
15667 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
15668 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15670 CallParams callParams;
15671 GetBasicParams(callParams);
15673 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15674 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
15681 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
15682 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15684 CallParams callParams;
15685 GetBasicParams(callParams);
15687 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15688 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
15695 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
15696 const VkBufferCreateInfo& bufCreateInfo,
15700 CallParams callParams;
15701 GetBasicParams(callParams);
15703 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15704 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15705 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15706 bufCreateInfo.flags,
15707 bufCreateInfo.size,
15708 bufCreateInfo.usage,
15709 bufCreateInfo.sharingMode,
15710 allocCreateInfo.
flags,
15711 allocCreateInfo.
usage,
15715 allocCreateInfo.
pool,
15717 userDataStr.GetString());
15721 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
15722 const VkImageCreateInfo& imageCreateInfo,
15726 CallParams callParams;
15727 GetBasicParams(callParams);
15729 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15730 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15731 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15732 imageCreateInfo.flags,
15733 imageCreateInfo.imageType,
15734 imageCreateInfo.format,
15735 imageCreateInfo.extent.width,
15736 imageCreateInfo.extent.height,
15737 imageCreateInfo.extent.depth,
15738 imageCreateInfo.mipLevels,
15739 imageCreateInfo.arrayLayers,
15740 imageCreateInfo.samples,
15741 imageCreateInfo.tiling,
15742 imageCreateInfo.usage,
15743 imageCreateInfo.sharingMode,
15744 imageCreateInfo.initialLayout,
15745 allocCreateInfo.
flags,
15746 allocCreateInfo.
usage,
15750 allocCreateInfo.
pool,
15752 userDataStr.GetString());
15756 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
15759 CallParams callParams;
15760 GetBasicParams(callParams);
15762 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15763 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
15768 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
15771 CallParams callParams;
15772 GetBasicParams(callParams);
15774 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15775 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
15780 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
15783 CallParams callParams;
15784 GetBasicParams(callParams);
15786 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15787 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
15792 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
15795 CallParams callParams;
15796 GetBasicParams(callParams);
15798 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15799 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
15804 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
15807 CallParams callParams;
15808 GetBasicParams(callParams);
15810 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15811 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
15816 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
15820 CallParams callParams;
15821 GetBasicParams(callParams);
15823 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15824 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
15827 fprintf(m_File,
",");
15829 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
15839 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
15842 CallParams callParams;
15843 GetBasicParams(callParams);
15845 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15846 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
15851 void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
15855 CallParams callParams;
15856 GetBasicParams(callParams);
15858 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15859 fprintf(m_File,
"%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15860 pool, name != VMA_NULL ? name :
"");
15866 if(pUserData != VMA_NULL)
15870 m_Str = (
const char*)pUserData;
15875 snprintf(m_PtrStr, 17,
"%p", pUserData);
15885 void VmaRecorder::WriteConfiguration(
15886 const VkPhysicalDeviceProperties& devProps,
15887 const VkPhysicalDeviceMemoryProperties& memProps,
15888 uint32_t vulkanApiVersion,
15889 bool dedicatedAllocationExtensionEnabled,
15890 bool bindMemory2ExtensionEnabled,
15891 bool memoryBudgetExtensionEnabled,
15892 bool deviceCoherentMemoryExtensionEnabled)
15894 fprintf(m_File,
"Config,Begin\n");
15896 fprintf(m_File,
"VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));
15898 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
15899 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
15900 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
15901 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
15902 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
15903 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
15905 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
15906 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
15907 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
15909 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
15910 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
15912 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
15913 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
15915 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
15916 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
15918 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
15919 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
15922 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
15923 fprintf(m_File,
"Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
15924 fprintf(m_File,
"Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
15925 fprintf(m_File,
"Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);
15927 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
15928 fprintf(m_File,
"Macro,VMA_MIN_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_MIN_ALIGNMENT);
15929 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
15930 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
15931 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
15932 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
15933 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
15934 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
15935 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
15937 fprintf(m_File,
"Config,End\n");
15940 void VmaRecorder::GetBasicParams(CallParams& outParams)
15942 #if defined(_WIN32)
15943 outParams.threadId = GetCurrentThreadId();
15948 std::thread::id thread_id = std::this_thread::get_id();
15949 std::stringstream thread_id_to_string_converter;
15950 thread_id_to_string_converter << thread_id;
15951 std::string thread_id_as_string = thread_id_to_string_converter.str();
15952 outParams.threadId =
static_cast<uint32_t
>(std::stoi(thread_id_as_string.c_str()));
15955 auto current_time = std::chrono::high_resolution_clock::now();
15957 outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
15960 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
15964 fprintf(m_File,
"%p", pItems[0]);
15965 for(uint64_t i = 1; i < count; ++i)
15967 fprintf(m_File,
" %p", pItems[i]);
15972 void VmaRecorder::Flush()
15985 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
15986 m_Allocator(pAllocationCallbacks, 1024)
15990 template<
typename... Types>
VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
15992 VmaMutexLock mutexLock(m_Mutex);
15993 return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
15996 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
15998 VmaMutexLock mutexLock(m_Mutex);
15999 m_Allocator.Free(hAlloc);
16007 m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
16014 m_hDevice(pCreateInfo->device),
16015 m_hInstance(pCreateInfo->instance),
16016 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
16017 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
16018 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
16019 m_AllocationObjectAllocator(&m_AllocationCallbacks),
16020 m_HeapSizeLimitMask(0),
16021 m_DeviceMemoryCount(0),
16022 m_PreferredLargeHeapBlockSize(0),
16023 m_PhysicalDevice(pCreateInfo->physicalDevice),
16024 m_CurrentFrameIndex(0),
16025 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
16027 m_GlobalMemoryTypeBits(UINT32_MAX)
16029 ,m_pRecorder(VMA_NULL)
16032 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16034 m_UseKhrDedicatedAllocation =
false;
16035 m_UseKhrBindMemory2 =
false;
16038 if(VMA_DEBUG_DETECT_CORRUPTION)
16041 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
16046 if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
16048 #if !(VMA_DEDICATED_ALLOCATION)
16051 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
16054 #if !(VMA_BIND_MEMORY2)
16057 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
16061 #if !(VMA_MEMORY_BUDGET)
16064 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
16067 #if !(VMA_BUFFER_DEVICE_ADDRESS)
16068 if(m_UseKhrBufferDeviceAddress)
16070 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
16073 #if VMA_VULKAN_VERSION < 1002000
16074 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
16076 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
16079 #if VMA_VULKAN_VERSION < 1001000
16080 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16082 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
16085 #if !(VMA_MEMORY_PRIORITY)
16086 if(m_UseExtMemoryPriority)
16088 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
16092 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
16093 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
16094 memset(&m_MemProps, 0,
sizeof(m_MemProps));
16096 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
16097 memset(&m_VulkanFunctions, 0,
sizeof(m_VulkanFunctions));
16108 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
16109 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
16111 VMA_ASSERT(VmaIsPow2(VMA_MIN_ALIGNMENT));
16112 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
16113 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
16114 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
16119 m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
16123 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
16125 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
16126 if(limit != VK_WHOLE_SIZE)
16128 m_HeapSizeLimitMask |= 1u << heapIndex;
16129 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
16131 m_MemProps.memoryHeaps[heapIndex].size = limit;
16137 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16139 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
16141 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
16145 preferredBlockSize,
16148 GetBufferImageGranularity(),
16153 GetMemoryTypeMinAlignment(memTypeIndex));
16161 VkResult res = VK_SUCCESS;
16166 #if VMA_RECORDING_ENABLED
16167 m_pRecorder = vma_new(
this, VmaRecorder)();
16169 if(res != VK_SUCCESS)
16173 m_pRecorder->WriteConfiguration(
16174 m_PhysicalDeviceProperties,
16176 m_VulkanApiVersion,
16177 m_UseKhrDedicatedAllocation,
16178 m_UseKhrBindMemory2,
16179 m_UseExtMemoryBudget,
16180 m_UseAmdDeviceCoherentMemory);
16181 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
16183 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
16184 return VK_ERROR_FEATURE_NOT_PRESENT;
16188 #if VMA_MEMORY_BUDGET
16189 if(m_UseExtMemoryBudget)
16191 UpdateVulkanBudget();
16198 VmaAllocator_T::~VmaAllocator_T()
16200 #if VMA_RECORDING_ENABLED
16201 if(m_pRecorder != VMA_NULL)
16203 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
16204 vma_delete(
this, m_pRecorder);
16208 VMA_ASSERT(m_Pools.IsEmpty());
16210 for(
size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
16212 if(!m_DedicatedAllocations[memTypeIndex].IsEmpty())
16214 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
16217 vma_delete(
this, m_pBlockVectors[memTypeIndex]);
16221 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
16223 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
16224 ImportVulkanFunctions_Static();
16227 if(pVulkanFunctions != VMA_NULL)
16229 ImportVulkanFunctions_Custom(pVulkanFunctions);
16232 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
16233 ImportVulkanFunctions_Dynamic();
16236 ValidateVulkanFunctions();
16239 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
16241 void VmaAllocator_T::ImportVulkanFunctions_Static()
16244 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
16245 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
16246 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
16247 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
16248 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
16249 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
16250 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
16251 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
16252 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
16253 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
16254 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
16255 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
16256 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
16257 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
16258 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
16259 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
16260 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
16263 #if VMA_VULKAN_VERSION >= 1001000
16264 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16266 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
16267 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
16268 m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
16269 m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
16270 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
16277 void VmaAllocator_T::ImportVulkanFunctions_Custom(
const VmaVulkanFunctions* pVulkanFunctions)
16279 VMA_ASSERT(pVulkanFunctions != VMA_NULL);
16281 #define VMA_COPY_IF_NOT_NULL(funcName) \
16282 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
16284 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
16285 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
16286 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
16287 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
16288 VMA_COPY_IF_NOT_NULL(vkMapMemory);
16289 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
16290 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
16291 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
16292 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
16293 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
16294 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
16295 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
16296 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
16297 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
16298 VMA_COPY_IF_NOT_NULL(vkCreateImage);
16299 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
16300 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
16302 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16303 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
16304 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
16307 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16308 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
16309 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
16312 #if VMA_MEMORY_BUDGET
16313 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
16316 #undef VMA_COPY_IF_NOT_NULL
16319 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
16321 void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
16323 #define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
16324 if(m_VulkanFunctions.memberName == VMA_NULL) \
16325 m_VulkanFunctions.memberName = \
16326 (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
16327 #define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
16328 if(m_VulkanFunctions.memberName == VMA_NULL) \
16329 m_VulkanFunctions.memberName = \
16330 (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);
16332 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties,
"vkGetPhysicalDeviceProperties");
16333 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties,
"vkGetPhysicalDeviceMemoryProperties");
16334 VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory,
"vkAllocateMemory");
16335 VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory,
"vkFreeMemory");
16336 VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory,
"vkMapMemory");
16337 VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory,
"vkUnmapMemory");
16338 VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges,
"vkFlushMappedMemoryRanges");
16339 VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges,
"vkInvalidateMappedMemoryRanges");
16340 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory,
"vkBindBufferMemory");
16341 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory,
"vkBindImageMemory");
16342 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements,
"vkGetBufferMemoryRequirements");
16343 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements,
"vkGetImageMemoryRequirements");
16344 VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer,
"vkCreateBuffer");
16345 VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer,
"vkDestroyBuffer");
16346 VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage,
"vkCreateImage");
16347 VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage,
"vkDestroyImage");
16348 VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer,
"vkCmdCopyBuffer");
16350 #if VMA_VULKAN_VERSION >= 1001000
16351 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16353 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2,
"vkGetBufferMemoryRequirements2");
16354 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2,
"vkGetImageMemoryRequirements2");
16355 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2,
"vkBindBufferMemory2");
16356 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2,
"vkBindImageMemory2");
16357 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2,
"vkGetPhysicalDeviceMemoryProperties2");
16361 #if VMA_DEDICATED_ALLOCATION
16362 if(m_UseKhrDedicatedAllocation)
16364 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR,
"vkGetBufferMemoryRequirements2KHR");
16365 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR,
"vkGetImageMemoryRequirements2KHR");
16369 #if VMA_BIND_MEMORY2
16370 if(m_UseKhrBindMemory2)
16372 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR,
"vkBindBufferMemory2KHR");
16373 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR,
"vkBindImageMemory2KHR");
16377 #if VMA_MEMORY_BUDGET
16378 if(m_UseExtMemoryBudget)
16380 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR,
"vkGetPhysicalDeviceMemoryProperties2KHR");
16384 #undef VMA_FETCH_DEVICE_FUNC
16385 #undef VMA_FETCH_INSTANCE_FUNC
16390 void VmaAllocator_T::ValidateVulkanFunctions()
16392 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
16393 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
16394 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
16395 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
16396 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
16397 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
16398 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
16399 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
16400 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
16401 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
16402 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
16403 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
16404 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
16405 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
16406 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
16407 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
16408 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
16410 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16411 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
16413 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
16414 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
16418 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16419 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
16421 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
16422 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
16426 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
16427 if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16429 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
16434 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
16436 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16437 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
16438 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
16439 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
16442 VkResult VmaAllocator_T::AllocateMemoryOfType(
16444 VkDeviceSize alignment,
16445 bool dedicatedAllocation,
16446 VkBuffer dedicatedBuffer,
16447 VkBufferUsageFlags dedicatedBufferUsage,
16448 VkImage dedicatedImage,
16450 uint32_t memTypeIndex,
16451 VmaSuballocationType suballocType,
16452 size_t allocationCount,
16455 VMA_ASSERT(pAllocations != VMA_NULL);
16456 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
16462 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16472 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
16473 VMA_ASSERT(blockVector);
16475 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
16476 bool preferDedicatedMemory =
16477 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
16478 dedicatedAllocation ||
16480 size > preferredBlockSize / 2;
16482 if(preferDedicatedMemory &&
16484 finalCreateInfo.
pool == VK_NULL_HANDLE)
16493 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16497 return AllocateDedicatedMemory(
16507 dedicatedBufferUsage,
16515 VkResult res = blockVector->Allocate(
16516 m_CurrentFrameIndex.load(),
16523 if(res == VK_SUCCESS)
16531 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16537 if(m_DeviceMemoryCount.load() > m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount * 3 / 4)
16539 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16542 res = AllocateDedicatedMemory(
16552 dedicatedBufferUsage,
16556 if(res == VK_SUCCESS)
16559 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
16565 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
16571 VkResult VmaAllocator_T::AllocateDedicatedMemory(
16573 VmaSuballocationType suballocType,
16574 uint32_t memTypeIndex,
16577 bool isUserDataString,
16580 VkBuffer dedicatedBuffer,
16581 VkBufferUsageFlags dedicatedBufferUsage,
16582 VkImage dedicatedImage,
16583 size_t allocationCount,
16586 VMA_ASSERT(allocationCount > 0 && pAllocations);
16590 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16592 GetBudget(&heapBudget, heapIndex, 1);
16593 if(heapBudget.
usage + size * allocationCount > heapBudget.
budget)
16595 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16599 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
16600 allocInfo.memoryTypeIndex = memTypeIndex;
16601 allocInfo.allocationSize = size;
16603 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16604 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
16605 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16607 if(dedicatedBuffer != VK_NULL_HANDLE)
16609 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
16610 dedicatedAllocInfo.buffer = dedicatedBuffer;
16611 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
16613 else if(dedicatedImage != VK_NULL_HANDLE)
16615 dedicatedAllocInfo.image = dedicatedImage;
16616 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
16621 #if VMA_BUFFER_DEVICE_ADDRESS
16622 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
16623 if(m_UseKhrBufferDeviceAddress)
16625 bool canContainBufferWithDeviceAddress =
true;
16626 if(dedicatedBuffer != VK_NULL_HANDLE)
16628 canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX ||
16629 (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
16631 else if(dedicatedImage != VK_NULL_HANDLE)
16633 canContainBufferWithDeviceAddress =
false;
16635 if(canContainBufferWithDeviceAddress)
16637 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
16638 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
16643 #if VMA_MEMORY_PRIORITY
16644 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
16645 if(m_UseExtMemoryPriority)
16647 priorityInfo.priority = priority;
16648 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
16653 VkResult res = VK_SUCCESS;
16654 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16656 res = AllocateDedicatedMemoryPage(
16664 pAllocations + allocIndex);
16665 if(res != VK_SUCCESS)
16671 if(res == VK_SUCCESS)
16675 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
16676 DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
16677 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16679 dedicatedAllocations.PushBack(pAllocations[allocIndex]);
16683 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
16688 while(allocIndex--)
16691 VkDeviceMemory hMemory = currAlloc->GetMemory();
16703 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
16704 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
16705 currAlloc->SetUserData(
this, VMA_NULL);
16706 m_AllocationObjectAllocator.Free(currAlloc);
16709 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
16715 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
16717 VmaSuballocationType suballocType,
16718 uint32_t memTypeIndex,
16719 const VkMemoryAllocateInfo& allocInfo,
16721 bool isUserDataString,
16725 VkDeviceMemory hMemory = VK_NULL_HANDLE;
16726 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
16729 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
16733 void* pMappedData = VMA_NULL;
16736 res = (*m_VulkanFunctions.vkMapMemory)(
16745 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
16746 FreeVulkanMemory(memTypeIndex, size, hMemory);
16751 *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
16752 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
16753 (*pAllocation)->SetUserData(
this, pUserData);
16754 m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
16755 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
16757 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
16763 void VmaAllocator_T::GetBufferMemoryRequirements(
16765 VkMemoryRequirements& memReq,
16766 bool& requiresDedicatedAllocation,
16767 bool& prefersDedicatedAllocation)
const
16769 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16770 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16772 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
16773 memReqInfo.buffer = hBuffer;
16775 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16777 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16778 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16780 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16782 memReq = memReq2.memoryRequirements;
16783 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16784 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
16789 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
16790 requiresDedicatedAllocation =
false;
16791 prefersDedicatedAllocation =
false;
16795 void VmaAllocator_T::GetImageMemoryRequirements(
16797 VkMemoryRequirements& memReq,
16798 bool& requiresDedicatedAllocation,
16799 bool& prefersDedicatedAllocation)
const
16801 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16802 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16804 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
16805 memReqInfo.image = hImage;
16807 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16809 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16810 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16812 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16814 memReq = memReq2.memoryRequirements;
16815 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16816 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
16821 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
16822 requiresDedicatedAllocation =
false;
16823 prefersDedicatedAllocation =
false;
16827 VkResult VmaAllocator_T::AllocateMemory(
16828 const VkMemoryRequirements& vkMemReq,
16829 bool requiresDedicatedAllocation,
16830 bool prefersDedicatedAllocation,
16831 VkBuffer dedicatedBuffer,
16832 VkBufferUsageFlags dedicatedBufferUsage,
16833 VkImage dedicatedImage,
16835 VmaSuballocationType suballocType,
16836 size_t allocationCount,
16839 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
16841 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
16843 if(vkMemReq.size == 0)
16845 return VK_ERROR_VALIDATION_FAILED_EXT;
16850 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
16851 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16856 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
16857 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16859 if(requiresDedicatedAllocation)
16863 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
16864 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16866 if(createInfo.
pool != VK_NULL_HANDLE)
16868 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
16869 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16872 if((createInfo.
pool != VK_NULL_HANDLE) &&
16875 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
16876 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16879 if(createInfo.
pool != VK_NULL_HANDLE)
16884 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16889 return createInfo.
pool->m_BlockVector.Allocate(
16890 m_CurrentFrameIndex.load(),
16892 vkMemReq.alignment,
16901 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
16902 uint32_t memTypeIndex = UINT32_MAX;
16904 if(res == VK_SUCCESS)
16906 res = AllocateMemoryOfType(
16908 vkMemReq.alignment,
16909 requiresDedicatedAllocation || prefersDedicatedAllocation,
16911 dedicatedBufferUsage,
16919 if(res == VK_SUCCESS)
16929 memoryTypeBits &= ~(1u << memTypeIndex);
16932 if(res == VK_SUCCESS)
16934 res = AllocateMemoryOfType(
16936 vkMemReq.alignment,
16937 requiresDedicatedAllocation || prefersDedicatedAllocation,
16939 dedicatedBufferUsage,
16947 if(res == VK_SUCCESS)
16957 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16968 void VmaAllocator_T::FreeMemory(
16969 size_t allocationCount,
16972 VMA_ASSERT(pAllocations);
16974 for(
size_t allocIndex = allocationCount; allocIndex--; )
16978 if(allocation != VK_NULL_HANDLE)
16980 if(TouchAllocation(allocation))
16982 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
16984 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
16987 switch(allocation->GetType())
16989 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
16991 VmaBlockVector* pBlockVector = VMA_NULL;
16992 VmaPool hPool = allocation->GetBlock()->GetParentPool();
16993 if(hPool != VK_NULL_HANDLE)
16995 pBlockVector = &hPool->m_BlockVector;
16999 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17000 pBlockVector = m_pBlockVectors[memTypeIndex];
17002 pBlockVector->Free(allocation);
17005 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17006 FreeDedicatedMemory(allocation);
17014 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
17015 allocation->SetUserData(
this, VMA_NULL);
17016 m_AllocationObjectAllocator.Free(allocation);
17021 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
17024 InitStatInfo(pStats->
total);
17025 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
17027 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
17031 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17033 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
17034 VMA_ASSERT(pBlockVector);
17035 pBlockVector->AddStats(pStats);
17040 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
17041 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
17043 pool->m_BlockVector.AddStats(pStats);
17048 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17050 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
17051 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17052 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
17054 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
17057 alloc->DedicatedAllocCalcStatsInfo(allocationStatInfo);
17058 VmaAddStatInfo(pStats->
total, allocationStatInfo);
17059 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
17060 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
17065 VmaPostprocessCalcStatInfo(pStats->
total);
17066 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
17067 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
17068 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
17069 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
17072 void VmaAllocator_T::GetBudget(
VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
17074 #if VMA_MEMORY_BUDGET
17075 if(m_UseExtMemoryBudget)
17077 if(m_Budget.m_OperationsSinceBudgetFetch < 30)
17079 VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
17080 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
17082 const uint32_t heapIndex = firstHeap + i;
17084 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
17087 if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->
blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
17089 outBudget->
usage = m_Budget.m_VulkanUsage[heapIndex] +
17090 outBudget->
blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
17094 outBudget->
usage = 0;
17098 outBudget->
budget = VMA_MIN(
17099 m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
17104 UpdateVulkanBudget();
17105 GetBudget(outBudget, firstHeap, heapCount);
17111 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
17113 const uint32_t heapIndex = firstHeap + i;
17115 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
17119 outBudget->
budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
17124 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
17126 VkResult VmaAllocator_T::DefragmentationBegin(
17136 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
17137 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
17140 (*pContext)->AddAllocations(
17143 VkResult res = (*pContext)->Defragment(
17148 if(res != VK_NOT_READY)
17150 vma_delete(
this, *pContext);
17151 *pContext = VMA_NULL;
17157 VkResult VmaAllocator_T::DefragmentationEnd(
17160 vma_delete(
this, context);
17164 VkResult VmaAllocator_T::DefragmentationPassBegin(
17168 return context->DefragmentPassBegin(pInfo);
17170 VkResult VmaAllocator_T::DefragmentationPassEnd(
17173 return context->DefragmentPassEnd();
17179 if(hAllocation->CanBecomeLost())
17185 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17186 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17189 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
17193 pAllocationInfo->
offset = 0;
17194 pAllocationInfo->
size = hAllocation->GetSize();
17196 pAllocationInfo->
pUserData = hAllocation->GetUserData();
17199 else if(localLastUseFrameIndex == localCurrFrameIndex)
17201 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
17202 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
17203 pAllocationInfo->
offset = hAllocation->GetOffset();
17204 pAllocationInfo->
size = hAllocation->GetSize();
17206 pAllocationInfo->
pUserData = hAllocation->GetUserData();
17211 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17213 localLastUseFrameIndex = localCurrFrameIndex;
17220 #if VMA_STATS_STRING_ENABLED
17221 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17222 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17225 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
17226 if(localLastUseFrameIndex == localCurrFrameIndex)
17232 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17234 localLastUseFrameIndex = localCurrFrameIndex;
17240 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
17241 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
17242 pAllocationInfo->
offset = hAllocation->GetOffset();
17243 pAllocationInfo->
size = hAllocation->GetSize();
17244 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
17245 pAllocationInfo->
pUserData = hAllocation->GetUserData();
17249 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
17252 if(hAllocation->CanBecomeLost())
17254 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17255 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17258 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
17262 else if(localLastUseFrameIndex == localCurrFrameIndex)
17268 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17270 localLastUseFrameIndex = localCurrFrameIndex;
17277 #if VMA_STATS_STRING_ENABLED
17278 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17279 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17282 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
17283 if(localLastUseFrameIndex == localCurrFrameIndex)
17289 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17291 localLastUseFrameIndex = localCurrFrameIndex;
17303 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
17313 return VK_ERROR_INITIALIZATION_FAILED;
17317 ((1u << pCreateInfo->
memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
17319 return VK_ERROR_FEATURE_NOT_PRESENT;
17326 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
17328 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
17330 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
17331 if(res != VK_SUCCESS)
17333 vma_delete(
this, *pPool);
17340 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
17341 (*pPool)->SetId(m_NextPoolId++);
17342 m_Pools.PushBack(*pPool);
17348 void VmaAllocator_T::DestroyPool(
VmaPool pool)
17352 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
17353 m_Pools.Remove(pool);
17356 vma_delete(
this, pool);
17361 pool->m_BlockVector.GetPoolStats(pPoolStats);
17364 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
17366 m_CurrentFrameIndex.store(frameIndex);
17368 #if VMA_MEMORY_BUDGET
17369 if(m_UseExtMemoryBudget)
17371 UpdateVulkanBudget();
17376 void VmaAllocator_T::MakePoolAllocationsLost(
17378 size_t* pLostAllocationCount)
17380 hPool->m_BlockVector.MakePoolAllocationsLost(
17381 m_CurrentFrameIndex.load(),
17382 pLostAllocationCount);
17385 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
17387 return hPool->m_BlockVector.CheckCorruption();
17390 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
17392 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
17395 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17397 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
17399 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
17400 VMA_ASSERT(pBlockVector);
17401 VkResult localRes = pBlockVector->CheckCorruption();
17404 case VK_ERROR_FEATURE_NOT_PRESENT:
17407 finalRes = VK_SUCCESS;
17417 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
17418 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
17420 if(((1u << pool->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
17422 VkResult localRes = pool->m_BlockVector.CheckCorruption();
17425 case VK_ERROR_FEATURE_NOT_PRESENT:
17428 finalRes = VK_SUCCESS;
17440 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
17442 *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST,
false);
17443 (*pAllocation)->InitLost();
17447 template<
typename T>
17448 struct AtomicTransactionalIncrement
17451 typedef std::atomic<T> AtomicT;
17452 ~AtomicTransactionalIncrement()
17457 T Increment(AtomicT* atomic)
17460 return m_Atomic->fetch_add(1);
17464 m_Atomic =
nullptr;
17468 AtomicT* m_Atomic =
nullptr;
17471 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
17473 AtomicTransactionalIncrement<uint32_t> deviceMemoryCountIncrement;
17474 const uint64_t prevDeviceMemoryCount = deviceMemoryCountIncrement.Increment(&m_DeviceMemoryCount);
17475 #if VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
17476 if(prevDeviceMemoryCount >= m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount)
17478 return VK_ERROR_TOO_MANY_OBJECTS;
17482 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
17485 if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
17487 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
17488 VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
17491 const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
17492 if(blockBytesAfterAllocation > heapSize)
17494 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
17496 if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
17504 m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
17508 VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
17510 if(res == VK_SUCCESS)
17512 #if VMA_MEMORY_BUDGET
17513 ++m_Budget.m_OperationsSinceBudgetFetch;
17517 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
17519 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.
pUserData);
17522 deviceMemoryCountIncrement.Commit();
17526 m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
17532 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
17535 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
17537 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.
pUserData);
17541 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
17543 m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
17545 --m_DeviceMemoryCount;
17548 VkResult VmaAllocator_T::BindVulkanBuffer(
17549 VkDeviceMemory memory,
17550 VkDeviceSize memoryOffset,
17554 if(pNext != VMA_NULL)
17556 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17557 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17558 m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
17560 VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
17561 bindBufferMemoryInfo.pNext = pNext;
17562 bindBufferMemoryInfo.buffer = buffer;
17563 bindBufferMemoryInfo.memory = memory;
17564 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17565 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
17570 return VK_ERROR_EXTENSION_NOT_PRESENT;
17575 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
17579 VkResult VmaAllocator_T::BindVulkanImage(
17580 VkDeviceMemory memory,
17581 VkDeviceSize memoryOffset,
17585 if(pNext != VMA_NULL)
17587 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17588 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17589 m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
17591 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
17592 bindBufferMemoryInfo.pNext = pNext;
17593 bindBufferMemoryInfo.image = image;
17594 bindBufferMemoryInfo.memory = memory;
17595 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17596 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
17601 return VK_ERROR_EXTENSION_NOT_PRESENT;
17606 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
17610 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
17612 if(hAllocation->CanBecomeLost())
17614 return VK_ERROR_MEMORY_MAP_FAILED;
17617 switch(hAllocation->GetType())
17619 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17621 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17622 char *pBytes = VMA_NULL;
17623 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
17624 if(res == VK_SUCCESS)
17626 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
17627 hAllocation->BlockAllocMap();
17631 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17632 return hAllocation->DedicatedAllocMap(
this, ppData);
17635 return VK_ERROR_MEMORY_MAP_FAILED;
17641 switch(hAllocation->GetType())
17643 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17645 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17646 hAllocation->BlockAllocUnmap();
17647 pBlock->Unmap(
this, 1);
17650 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17651 hAllocation->DedicatedAllocUnmap(
this);
17658 VkResult VmaAllocator_T::BindBufferMemory(
17660 VkDeviceSize allocationLocalOffset,
17664 VkResult res = VK_SUCCESS;
17665 switch(hAllocation->GetType())
17667 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17668 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
17670 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17672 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17673 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
17674 res = pBlock->BindBufferMemory(
this, hAllocation, allocationLocalOffset, hBuffer, pNext);
17683 VkResult VmaAllocator_T::BindImageMemory(
17685 VkDeviceSize allocationLocalOffset,
17689 VkResult res = VK_SUCCESS;
17690 switch(hAllocation->GetType())
17692 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17693 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
17695 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17697 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
17698 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
17699 res = pBlock->BindImageMemory(
this, hAllocation, allocationLocalOffset, hImage, pNext);
17708 VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
17710 VkDeviceSize offset, VkDeviceSize size,
17711 VMA_CACHE_OPERATION op)
17713 VkResult res = VK_SUCCESS;
17715 VkMappedMemoryRange memRange = {};
17716 if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
17720 case VMA_CACHE_FLUSH:
17721 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
17723 case VMA_CACHE_INVALIDATE:
17724 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
17734 VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
17735 uint32_t allocationCount,
17737 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
17738 VMA_CACHE_OPERATION op)
17740 typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
17741 typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
17742 RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
17744 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
17747 const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
17748 const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
17749 VkMappedMemoryRange newRange;
17750 if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
17752 ranges.push_back(newRange);
17756 VkResult res = VK_SUCCESS;
17757 if(!ranges.empty())
17761 case VMA_CACHE_FLUSH:
17762 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
17764 case VMA_CACHE_INVALIDATE:
17765 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
17775 void VmaAllocator_T::FreeDedicatedMemory(
const VmaAllocation allocation)
17777 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
17779 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17781 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17782 DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
17783 dedicatedAllocations.Remove(allocation);
17786 VkDeviceMemory hMemory = allocation->GetMemory();
17798 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
17800 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
17803 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const
17805 VkBufferCreateInfo dummyBufCreateInfo;
17806 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
17808 uint32_t memoryTypeBits = 0;
17811 VkBuffer buf = VK_NULL_HANDLE;
17812 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
17813 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
17814 if(res == VK_SUCCESS)
17817 VkMemoryRequirements memReq;
17818 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
17819 memoryTypeBits = memReq.memoryTypeBits;
17822 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
17825 return memoryTypeBits;
17828 uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits()
const
17831 VMA_ASSERT(GetMemoryTypeCount() > 0);
17833 uint32_t memoryTypeBits = UINT32_MAX;
17835 if(!m_UseAmdDeviceCoherentMemory)
17838 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17840 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
17842 memoryTypeBits &= ~(1u << memTypeIndex);
17847 return memoryTypeBits;
17850 bool VmaAllocator_T::GetFlushOrInvalidateRange(
17852 VkDeviceSize offset, VkDeviceSize size,
17853 VkMappedMemoryRange& outRange)
const
17855 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17856 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
17858 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
17859 const VkDeviceSize allocationSize = allocation->GetSize();
17860 VMA_ASSERT(offset <= allocationSize);
17862 outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
17863 outRange.pNext = VMA_NULL;
17864 outRange.memory = allocation->GetMemory();
17866 switch(allocation->GetType())
17868 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17869 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
17870 if(size == VK_WHOLE_SIZE)
17872 outRange.size = allocationSize - outRange.offset;
17876 VMA_ASSERT(offset + size <= allocationSize);
17877 outRange.size = VMA_MIN(
17878 VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
17879 allocationSize - outRange.offset);
17882 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17885 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
17886 if(size == VK_WHOLE_SIZE)
17888 size = allocationSize - offset;
17892 VMA_ASSERT(offset + size <= allocationSize);
17894 outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);
17897 const VkDeviceSize allocationOffset = allocation->GetOffset();
17898 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
17899 const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
17900 outRange.offset += allocationOffset;
17901 outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
17913 #if VMA_MEMORY_BUDGET
17915 void VmaAllocator_T::UpdateVulkanBudget()
17917 VMA_ASSERT(m_UseExtMemoryBudget);
17919 VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };
17921 VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
17922 VmaPnextChainPushFront(&memProps, &budgetProps);
17924 GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
17927 VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
17929 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
17931 m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
17932 m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
17933 m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
17936 if(m_Budget.m_VulkanBudget[heapIndex] == 0)
17938 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
17940 else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
17942 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
17944 if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
17946 m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
17949 m_Budget.m_OperationsSinceBudgetFetch = 0;
17955 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
17957 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
17958 !hAllocation->CanBecomeLost() &&
17959 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
17961 void* pData = VMA_NULL;
17962 VkResult res = Map(hAllocation, &pData);
17963 if(res == VK_SUCCESS)
17965 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
17966 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
17967 Unmap(hAllocation);
17971 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
17976 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
17978 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
17979 if(memoryTypeBits == UINT32_MAX)
17981 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
17982 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
17984 return memoryTypeBits;
17987 #if VMA_STATS_STRING_ENABLED
17989 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
17991 bool dedicatedAllocationsStarted =
false;
17992 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17994 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17995 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
17996 if(!dedicatedAllocList.IsEmpty())
17998 if(dedicatedAllocationsStarted ==
false)
18000 dedicatedAllocationsStarted =
true;
18001 json.WriteString(
"DedicatedAllocations");
18002 json.BeginObject();
18005 json.BeginString(
"Type ");
18006 json.ContinueString(memTypeIndex);
18012 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
18014 json.BeginObject(
true);
18015 alloc->PrintParameters(json);
18022 if(dedicatedAllocationsStarted)
18028 bool allocationsStarted =
false;
18029 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
18031 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
18033 if(allocationsStarted ==
false)
18035 allocationsStarted =
true;
18036 json.WriteString(
"DefaultPools");
18037 json.BeginObject();
18040 json.BeginString(
"Type ");
18041 json.ContinueString(memTypeIndex);
18044 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
18047 if(allocationsStarted)
18055 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
18056 if(!m_Pools.IsEmpty())
18058 json.WriteString(
"Pools");
18059 json.BeginObject();
18060 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
18062 json.BeginString();
18063 json.ContinueString(pool->GetId());
18066 pool->m_BlockVector.PrintDetailedMap(json);
18082 VMA_ASSERT(pCreateInfo && pAllocator);
18085 VMA_DEBUG_LOG(
"vmaCreateAllocator");
18087 return (*pAllocator)->Init(pCreateInfo);
18093 if(allocator != VK_NULL_HANDLE)
18095 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
18096 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
18097 vma_delete(&allocationCallbacks, allocator);
18103 VMA_ASSERT(allocator && pAllocatorInfo);
18104 pAllocatorInfo->
instance = allocator->m_hInstance;
18105 pAllocatorInfo->
physicalDevice = allocator->GetPhysicalDevice();
18106 pAllocatorInfo->
device = allocator->m_hDevice;
18111 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
18113 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
18114 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
18119 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
18121 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
18122 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
18127 uint32_t memoryTypeIndex,
18128 VkMemoryPropertyFlags* pFlags)
18130 VMA_ASSERT(allocator && pFlags);
18131 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
18132 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
18137 uint32_t frameIndex)
18139 VMA_ASSERT(allocator);
18140 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
18142 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18144 allocator->SetCurrentFrameIndex(frameIndex);
18151 VMA_ASSERT(allocator && pStats);
18152 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18153 allocator->CalculateStats(pStats);
18160 VMA_ASSERT(allocator && pBudget);
18161 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18162 allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
18165 #if VMA_STATS_STRING_ENABLED
18169 char** ppStatsString,
18170 VkBool32 detailedMap)
18172 VMA_ASSERT(allocator && ppStatsString);
18173 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18175 VmaStringBuilder sb(allocator);
18177 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
18178 json.BeginObject();
18181 allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());
18184 allocator->CalculateStats(&stats);
18186 json.WriteString(
"Total");
18187 VmaPrintStatInfo(json, stats.
total);
18189 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
18191 json.BeginString(
"Heap ");
18192 json.ContinueString(heapIndex);
18194 json.BeginObject();
18196 json.WriteString(
"Size");
18197 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
18199 json.WriteString(
"Flags");
18200 json.BeginArray(
true);
18201 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
18203 json.WriteString(
"DEVICE_LOCAL");
18207 json.WriteString(
"Budget");
18208 json.BeginObject();
18210 json.WriteString(
"BlockBytes");
18211 json.WriteNumber(budget[heapIndex].blockBytes);
18212 json.WriteString(
"AllocationBytes");
18213 json.WriteNumber(budget[heapIndex].allocationBytes);
18214 json.WriteString(
"Usage");
18215 json.WriteNumber(budget[heapIndex].usage);
18216 json.WriteString(
"Budget");
18217 json.WriteNumber(budget[heapIndex].budget);
18223 json.WriteString(
"Stats");
18224 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
18227 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
18229 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
18231 json.BeginString(
"Type ");
18232 json.ContinueString(typeIndex);
18235 json.BeginObject();
18237 json.WriteString(
"Flags");
18238 json.BeginArray(
true);
18239 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
18240 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
18242 json.WriteString(
"DEVICE_LOCAL");
18244 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
18246 json.WriteString(
"HOST_VISIBLE");
18248 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
18250 json.WriteString(
"HOST_COHERENT");
18252 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
18254 json.WriteString(
"HOST_CACHED");
18256 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
18258 json.WriteString(
"LAZILY_ALLOCATED");
18260 #if VMA_VULKAN_VERSION >= 1001000
18261 if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
18263 json.WriteString(
"PROTECTED");
18266 #if VK_AMD_device_coherent_memory
18267 if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
18269 json.WriteString(
"DEVICE_COHERENT");
18271 if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
18273 json.WriteString(
"DEVICE_UNCACHED");
18280 json.WriteString(
"Stats");
18281 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
18290 if(detailedMap == VK_TRUE)
18292 allocator->PrintDetailedMap(json);
18298 const size_t len = sb.GetLength();
18299 char*
const pChars = vma_new_array(allocator,
char, len + 1);
18302 memcpy(pChars, sb.GetData(), len);
18304 pChars[len] =
'\0';
18305 *ppStatsString = pChars;
18310 char* pStatsString)
18312 if(pStatsString != VMA_NULL)
18314 VMA_ASSERT(allocator);
18315 size_t len = strlen(pStatsString);
18316 vma_delete_array(allocator, pStatsString, len + 1);
18327 uint32_t memoryTypeBits,
18329 uint32_t* pMemoryTypeIndex)
18331 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18332 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18333 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18335 memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();
18342 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
18343 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
18344 uint32_t notPreferredFlags = 0;
18347 switch(pAllocationCreateInfo->
usage)
18352 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18354 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18358 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
18361 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18362 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18364 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18368 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18369 preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
18372 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18375 requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
18384 (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
18386 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
18389 *pMemoryTypeIndex = UINT32_MAX;
18390 uint32_t minCost = UINT32_MAX;
18391 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
18392 memTypeIndex < allocator->GetMemoryTypeCount();
18393 ++memTypeIndex, memTypeBit <<= 1)
18396 if((memTypeBit & memoryTypeBits) != 0)
18398 const VkMemoryPropertyFlags currFlags =
18399 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
18401 if((requiredFlags & ~currFlags) == 0)
18404 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
18405 VmaCountBitsSet(currFlags & notPreferredFlags);
18407 if(currCost < minCost)
18409 *pMemoryTypeIndex = memTypeIndex;
18414 minCost = currCost;
18419 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
18424 const VkBufferCreateInfo* pBufferCreateInfo,
18426 uint32_t* pMemoryTypeIndex)
18428 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18429 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
18430 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18431 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18433 const VkDevice hDev = allocator->m_hDevice;
18434 VkBuffer hBuffer = VK_NULL_HANDLE;
18435 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
18436 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
18437 if(res == VK_SUCCESS)
18439 VkMemoryRequirements memReq = {};
18440 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
18441 hDev, hBuffer, &memReq);
18445 memReq.memoryTypeBits,
18446 pAllocationCreateInfo,
18449 allocator->GetVulkanFunctions().vkDestroyBuffer(
18450 hDev, hBuffer, allocator->GetAllocationCallbacks());
18457 const VkImageCreateInfo* pImageCreateInfo,
18459 uint32_t* pMemoryTypeIndex)
18461 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18462 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
18463 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18464 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18466 const VkDevice hDev = allocator->m_hDevice;
18467 VkImage hImage = VK_NULL_HANDLE;
18468 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
18469 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
18470 if(res == VK_SUCCESS)
18472 VkMemoryRequirements memReq = {};
18473 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
18474 hDev, hImage, &memReq);
18478 memReq.memoryTypeBits,
18479 pAllocationCreateInfo,
18482 allocator->GetVulkanFunctions().vkDestroyImage(
18483 hDev, hImage, allocator->GetAllocationCallbacks());
18493 VMA_ASSERT(allocator && pCreateInfo && pPool);
18495 VMA_DEBUG_LOG(
"vmaCreatePool");
18497 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18499 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
18501 #if VMA_RECORDING_ENABLED
18502 if(allocator->GetRecorder() != VMA_NULL)
18504 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
18515 VMA_ASSERT(allocator);
18517 if(pool == VK_NULL_HANDLE)
18522 VMA_DEBUG_LOG(
"vmaDestroyPool");
18524 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18526 #if VMA_RECORDING_ENABLED
18527 if(allocator->GetRecorder() != VMA_NULL)
18529 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
18533 allocator->DestroyPool(pool);
18541 VMA_ASSERT(allocator && pool && pPoolStats);
18543 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18545 allocator->GetPoolStats(pool, pPoolStats);
18551 size_t* pLostAllocationCount)
18553 VMA_ASSERT(allocator && pool);
18555 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18557 #if VMA_RECORDING_ENABLED
18558 if(allocator->GetRecorder() != VMA_NULL)
18560 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
18564 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
18569 VMA_ASSERT(allocator && pool);
18571 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18573 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
18575 return allocator->CheckPoolCorruption(pool);
18581 const char** ppName)
18583 VMA_ASSERT(allocator && pool && ppName);
18585 VMA_DEBUG_LOG(
"vmaGetPoolName");
18587 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18589 *ppName = pool->GetName();
18597 VMA_ASSERT(allocator && pool);
18599 VMA_DEBUG_LOG(
"vmaSetPoolName");
18601 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18603 pool->SetName(pName);
18605 #if VMA_RECORDING_ENABLED
18606 if(allocator->GetRecorder() != VMA_NULL)
18608 allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
18615 const VkMemoryRequirements* pVkMemoryRequirements,
18620 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
18622 VMA_DEBUG_LOG(
"vmaAllocateMemory");
18624 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18626 VkResult result = allocator->AllocateMemory(
18627 *pVkMemoryRequirements,
18634 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18638 #if VMA_RECORDING_ENABLED
18639 if(allocator->GetRecorder() != VMA_NULL)
18641 allocator->GetRecorder()->RecordAllocateMemory(
18642 allocator->GetCurrentFrameIndex(),
18643 *pVkMemoryRequirements,
18649 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18651 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18659 const VkMemoryRequirements* pVkMemoryRequirements,
18661 size_t allocationCount,
18665 if(allocationCount == 0)
18670 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
18672 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
18674 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18676 VkResult result = allocator->AllocateMemory(
18677 *pVkMemoryRequirements,
18684 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18688 #if VMA_RECORDING_ENABLED
18689 if(allocator->GetRecorder() != VMA_NULL)
18691 allocator->GetRecorder()->RecordAllocateMemoryPages(
18692 allocator->GetCurrentFrameIndex(),
18693 *pVkMemoryRequirements,
18695 (uint64_t)allocationCount,
18700 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18702 for(
size_t i = 0; i < allocationCount; ++i)
18704 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
18718 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18720 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
18722 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18724 VkMemoryRequirements vkMemReq = {};
18725 bool requiresDedicatedAllocation =
false;
18726 bool prefersDedicatedAllocation =
false;
18727 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
18728 requiresDedicatedAllocation,
18729 prefersDedicatedAllocation);
18731 VkResult result = allocator->AllocateMemory(
18733 requiresDedicatedAllocation,
18734 prefersDedicatedAllocation,
18739 VMA_SUBALLOCATION_TYPE_BUFFER,
18743 #if VMA_RECORDING_ENABLED
18744 if(allocator->GetRecorder() != VMA_NULL)
18746 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
18747 allocator->GetCurrentFrameIndex(),
18749 requiresDedicatedAllocation,
18750 prefersDedicatedAllocation,
18756 if(pAllocationInfo && result == VK_SUCCESS)
18758 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18771 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18773 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
18775 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18777 VkMemoryRequirements vkMemReq = {};
18778 bool requiresDedicatedAllocation =
false;
18779 bool prefersDedicatedAllocation =
false;
18780 allocator->GetImageMemoryRequirements(image, vkMemReq,
18781 requiresDedicatedAllocation, prefersDedicatedAllocation);
18783 VkResult result = allocator->AllocateMemory(
18785 requiresDedicatedAllocation,
18786 prefersDedicatedAllocation,
18791 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
18795 #if VMA_RECORDING_ENABLED
18796 if(allocator->GetRecorder() != VMA_NULL)
18798 allocator->GetRecorder()->RecordAllocateMemoryForImage(
18799 allocator->GetCurrentFrameIndex(),
18801 requiresDedicatedAllocation,
18802 prefersDedicatedAllocation,
18808 if(pAllocationInfo && result == VK_SUCCESS)
18810 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18820 VMA_ASSERT(allocator);
18822 if(allocation == VK_NULL_HANDLE)
18827 VMA_DEBUG_LOG(
"vmaFreeMemory");
18829 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18831 #if VMA_RECORDING_ENABLED
18832 if(allocator->GetRecorder() != VMA_NULL)
18834 allocator->GetRecorder()->RecordFreeMemory(
18835 allocator->GetCurrentFrameIndex(),
18840 allocator->FreeMemory(
18847 size_t allocationCount,
18850 if(allocationCount == 0)
18855 VMA_ASSERT(allocator);
18857 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
18859 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18861 #if VMA_RECORDING_ENABLED
18862 if(allocator->GetRecorder() != VMA_NULL)
18864 allocator->GetRecorder()->RecordFreeMemoryPages(
18865 allocator->GetCurrentFrameIndex(),
18866 (uint64_t)allocationCount,
18871 allocator->FreeMemory(allocationCount, pAllocations);
18879 VMA_ASSERT(allocator && allocation && pAllocationInfo);
18881 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18883 #if VMA_RECORDING_ENABLED
18884 if(allocator->GetRecorder() != VMA_NULL)
18886 allocator->GetRecorder()->RecordGetAllocationInfo(
18887 allocator->GetCurrentFrameIndex(),
18892 allocator->GetAllocationInfo(allocation, pAllocationInfo);
18899 VMA_ASSERT(allocator && allocation);
18901 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18903 #if VMA_RECORDING_ENABLED
18904 if(allocator->GetRecorder() != VMA_NULL)
18906 allocator->GetRecorder()->RecordTouchAllocation(
18907 allocator->GetCurrentFrameIndex(),
18912 return allocator->TouchAllocation(allocation);
18920 VMA_ASSERT(allocator && allocation);
18922 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18924 allocation->SetUserData(allocator, pUserData);
18926 #if VMA_RECORDING_ENABLED
18927 if(allocator->GetRecorder() != VMA_NULL)
18929 allocator->GetRecorder()->RecordSetAllocationUserData(
18930 allocator->GetCurrentFrameIndex(),
18941 VMA_ASSERT(allocator && pAllocation);
18943 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
18945 allocator->CreateLostAllocation(pAllocation);
18947 #if VMA_RECORDING_ENABLED
18948 if(allocator->GetRecorder() != VMA_NULL)
18950 allocator->GetRecorder()->RecordCreateLostAllocation(
18951 allocator->GetCurrentFrameIndex(),
18962 VMA_ASSERT(allocator && allocation && ppData);
18964 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18966 VkResult res = allocator->Map(allocation, ppData);
18968 #if VMA_RECORDING_ENABLED
18969 if(allocator->GetRecorder() != VMA_NULL)
18971 allocator->GetRecorder()->RecordMapMemory(
18972 allocator->GetCurrentFrameIndex(),
18984 VMA_ASSERT(allocator && allocation);
18986 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18988 #if VMA_RECORDING_ENABLED
18989 if(allocator->GetRecorder() != VMA_NULL)
18991 allocator->GetRecorder()->RecordUnmapMemory(
18992 allocator->GetCurrentFrameIndex(),
18997 allocator->Unmap(allocation);
19002 VMA_ASSERT(allocator && allocation);
19004 VMA_DEBUG_LOG(
"vmaFlushAllocation");
19006 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19008 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
19010 #if VMA_RECORDING_ENABLED
19011 if(allocator->GetRecorder() != VMA_NULL)
19013 allocator->GetRecorder()->RecordFlushAllocation(
19014 allocator->GetCurrentFrameIndex(),
19015 allocation, offset, size);
19024 VMA_ASSERT(allocator && allocation);
19026 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
19028 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19030 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
19032 #if VMA_RECORDING_ENABLED
19033 if(allocator->GetRecorder() != VMA_NULL)
19035 allocator->GetRecorder()->RecordInvalidateAllocation(
19036 allocator->GetCurrentFrameIndex(),
19037 allocation, offset, size);
19046 uint32_t allocationCount,
19048 const VkDeviceSize* offsets,
19049 const VkDeviceSize* sizes)
19051 VMA_ASSERT(allocator);
19053 if(allocationCount == 0)
19058 VMA_ASSERT(allocations);
19060 VMA_DEBUG_LOG(
"vmaFlushAllocations");
19062 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19064 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);
19066 #if VMA_RECORDING_ENABLED
19067 if(allocator->GetRecorder() != VMA_NULL)
19078 uint32_t allocationCount,
19080 const VkDeviceSize* offsets,
19081 const VkDeviceSize* sizes)
19083 VMA_ASSERT(allocator);
19085 if(allocationCount == 0)
19090 VMA_ASSERT(allocations);
19092 VMA_DEBUG_LOG(
"vmaInvalidateAllocations");
19094 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19096 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);
19098 #if VMA_RECORDING_ENABLED
19099 if(allocator->GetRecorder() != VMA_NULL)
19110 VMA_ASSERT(allocator);
19112 VMA_DEBUG_LOG(
"vmaCheckCorruption");
19114 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19116 return allocator->CheckCorruption(memoryTypeBits);
19122 size_t allocationCount,
19123 VkBool32* pAllocationsChanged,
19133 if(pDefragmentationInfo != VMA_NULL)
19147 if(res == VK_NOT_READY)
19160 VMA_ASSERT(allocator && pInfo && pContext);
19171 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
19173 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
19175 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19177 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
19179 #if VMA_RECORDING_ENABLED
19180 if(allocator->GetRecorder() != VMA_NULL)
19182 allocator->GetRecorder()->RecordDefragmentationBegin(
19183 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
19194 VMA_ASSERT(allocator);
19196 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
19198 if(context != VK_NULL_HANDLE)
19200 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19202 #if VMA_RECORDING_ENABLED
19203 if(allocator->GetRecorder() != VMA_NULL)
19205 allocator->GetRecorder()->RecordDefragmentationEnd(
19206 allocator->GetCurrentFrameIndex(), context);
19210 return allocator->DefragmentationEnd(context);
19224 VMA_ASSERT(allocator);
19227 VMA_DEBUG_LOG(
"vmaBeginDefragmentationPass");
19229 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19231 if(context == VK_NULL_HANDLE)
19237 return allocator->DefragmentationPassBegin(pInfo, context);
19243 VMA_ASSERT(allocator);
19245 VMA_DEBUG_LOG(
"vmaEndDefragmentationPass");
19246 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19248 if(context == VK_NULL_HANDLE)
19251 return allocator->DefragmentationPassEnd(context);
19259 VMA_ASSERT(allocator && allocation && buffer);
19261 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
19263 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19265 return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
19271 VkDeviceSize allocationLocalOffset,
19275 VMA_ASSERT(allocator && allocation && buffer);
19277 VMA_DEBUG_LOG(
"vmaBindBufferMemory2");
19279 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19281 return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
19289 VMA_ASSERT(allocator && allocation && image);
19291 VMA_DEBUG_LOG(
"vmaBindImageMemory");
19293 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19295 return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
19301 VkDeviceSize allocationLocalOffset,
19305 VMA_ASSERT(allocator && allocation && image);
19307 VMA_DEBUG_LOG(
"vmaBindImageMemory2");
19309 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19311 return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
19316 const VkBufferCreateInfo* pBufferCreateInfo,
19322 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
19324 if(pBufferCreateInfo->size == 0)
19326 return VK_ERROR_VALIDATION_FAILED_EXT;
19328 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
19329 !allocator->m_UseKhrBufferDeviceAddress)
19331 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
19332 return VK_ERROR_VALIDATION_FAILED_EXT;
19335 VMA_DEBUG_LOG(
"vmaCreateBuffer");
19337 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19339 *pBuffer = VK_NULL_HANDLE;
19340 *pAllocation = VK_NULL_HANDLE;
19343 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
19344 allocator->m_hDevice,
19346 allocator->GetAllocationCallbacks(),
19351 VkMemoryRequirements vkMemReq = {};
19352 bool requiresDedicatedAllocation =
false;
19353 bool prefersDedicatedAllocation =
false;
19354 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
19355 requiresDedicatedAllocation, prefersDedicatedAllocation);
19358 res = allocator->AllocateMemory(
19360 requiresDedicatedAllocation,
19361 prefersDedicatedAllocation,
19363 pBufferCreateInfo->usage,
19365 *pAllocationCreateInfo,
19366 VMA_SUBALLOCATION_TYPE_BUFFER,
19370 #if VMA_RECORDING_ENABLED
19371 if(allocator->GetRecorder() != VMA_NULL)
19373 allocator->GetRecorder()->RecordCreateBuffer(
19374 allocator->GetCurrentFrameIndex(),
19375 *pBufferCreateInfo,
19376 *pAllocationCreateInfo,
19386 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
19391 #if VMA_STATS_STRING_ENABLED
19392 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
19394 if(pAllocationInfo != VMA_NULL)
19396 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19401 allocator->FreeMemory(
19404 *pAllocation = VK_NULL_HANDLE;
19405 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19406 *pBuffer = VK_NULL_HANDLE;
19409 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19410 *pBuffer = VK_NULL_HANDLE;
19421 VMA_ASSERT(allocator);
19423 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19428 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
19430 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19432 #if VMA_RECORDING_ENABLED
19433 if(allocator->GetRecorder() != VMA_NULL)
19435 allocator->GetRecorder()->RecordDestroyBuffer(
19436 allocator->GetCurrentFrameIndex(),
19441 if(buffer != VK_NULL_HANDLE)
19443 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
19446 if(allocation != VK_NULL_HANDLE)
19448 allocator->FreeMemory(
19456 const VkImageCreateInfo* pImageCreateInfo,
19462 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
19464 if(pImageCreateInfo->extent.width == 0 ||
19465 pImageCreateInfo->extent.height == 0 ||
19466 pImageCreateInfo->extent.depth == 0 ||
19467 pImageCreateInfo->mipLevels == 0 ||
19468 pImageCreateInfo->arrayLayers == 0)
19470 return VK_ERROR_VALIDATION_FAILED_EXT;
19473 VMA_DEBUG_LOG(
"vmaCreateImage");
19475 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19477 *pImage = VK_NULL_HANDLE;
19478 *pAllocation = VK_NULL_HANDLE;
19481 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
19482 allocator->m_hDevice,
19484 allocator->GetAllocationCallbacks(),
19488 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
19489 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
19490 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
19493 VkMemoryRequirements vkMemReq = {};
19494 bool requiresDedicatedAllocation =
false;
19495 bool prefersDedicatedAllocation =
false;
19496 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
19497 requiresDedicatedAllocation, prefersDedicatedAllocation);
19499 res = allocator->AllocateMemory(
19501 requiresDedicatedAllocation,
19502 prefersDedicatedAllocation,
19506 *pAllocationCreateInfo,
19511 #if VMA_RECORDING_ENABLED
19512 if(allocator->GetRecorder() != VMA_NULL)
19514 allocator->GetRecorder()->RecordCreateImage(
19515 allocator->GetCurrentFrameIndex(),
19517 *pAllocationCreateInfo,
19527 res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
19532 #if VMA_STATS_STRING_ENABLED
19533 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
19535 if(pAllocationInfo != VMA_NULL)
19537 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19542 allocator->FreeMemory(
19545 *pAllocation = VK_NULL_HANDLE;
19546 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19547 *pImage = VK_NULL_HANDLE;
19550 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19551 *pImage = VK_NULL_HANDLE;
19562 VMA_ASSERT(allocator);
19564 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19569 VMA_DEBUG_LOG(
"vmaDestroyImage");
19571 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19573 #if VMA_RECORDING_ENABLED
19574 if(allocator->GetRecorder() != VMA_NULL)
19576 allocator->GetRecorder()->RecordDestroyImage(
19577 allocator->GetCurrentFrameIndex(),
19582 if(image != VK_NULL_HANDLE)
19584 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
19586 if(allocation != VK_NULL_HANDLE)
19588 allocator->FreeMemory(
Definition: vk_mem_alloc.h:2879
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2905
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2911
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2897
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2918
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2892
float priority
A floating-point value between 0 and 1, indicating the priority of the allocation relative to other m...
Definition: vk_mem_alloc.h:2925
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2887
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2881
Represents single memory allocation.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:3236
VkDeviceSize offset
Offset in VkDeviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:3260
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:3280
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:3241
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:3271
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:3285
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:3250
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:2413
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:2418
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2444
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:2469
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:2415
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null.
Definition: vk_mem_alloc.h:2475
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:2427
VkInstance instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:2487
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:2424
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:2482
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:2421
uint32_t vulkanApiVersion
Optional. The highest version of Vulkan that the application is designed to use.
Definition: vk_mem_alloc.h:2496
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:2430
Represents main object of this library initialized.
Information about existing VmaAllocator object.
Definition: vk_mem_alloc.h:2511
VkDevice device
Handle to Vulkan device object.
Definition: vk_mem_alloc.h:2526
VkInstance instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:2516
VkPhysicalDevice physicalDevice
Handle to Vulkan physical device object.
Definition: vk_mem_alloc.h:2521
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
Definition: vk_mem_alloc.h:2617
VkDeviceSize blockBytes
Sum size of all VkDeviceMemory blocks allocated from particular heap, in bytes.
Definition: vk_mem_alloc.h:2620
VkDeviceSize allocationBytes
Sum size of all allocations created in particular heap, in bytes.
Definition: vk_mem_alloc.h:2631
VkDeviceSize usage
Estimated current memory usage of the program, in bytes.
Definition: vk_mem_alloc.h:2641
VkDeviceSize budget
Estimated amount of memory available to the program, in bytes.
Definition: vk_mem_alloc.h:2652
Represents Opaque object that represents started defragmentation process.
Parameters for defragmentation.
Definition: vk_mem_alloc.h:3635
const VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:3675
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:3641
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:3695
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:3690
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:3638
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:3656
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:3659
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:3704
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:3685
const VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:3650
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:3680
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:3726
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:3736
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:3731
Parameters for incremental defragmentation steps.
Definition: vk_mem_alloc.h:3717
uint32_t moveCount
Definition: vk_mem_alloc.h:3718
VmaDefragmentationPassMoveInfo * pMoves
Definition: vk_mem_alloc.h:3719
Definition: vk_mem_alloc.h:3707
VkDeviceMemory memory
Definition: vk_mem_alloc.h:3709
VkDeviceSize offset
Definition: vk_mem_alloc.h:3710
VmaAllocation allocation
Definition: vk_mem_alloc.h:3708
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:3740
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:3748
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:3742
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:3744
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:3746
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:2222
void * pUserData
Optional, can be null.
Definition: vk_mem_alloc.h:2228
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:2224
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:2226
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:3047
float priority
A floating-point value between 0 and 1, indicating the priority of the allocations in this pool relat...
Definition: vk_mem_alloc.h:3095
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:3050
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:3053
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:3089
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:3062
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:3067
VkDeviceSize minAllocationAlignment
Additional minimum alignment to be used for all allocations created from this pool....
Definition: vk_mem_alloc.h:3102
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:3075
Represents custom memory pool.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:3107
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:3110
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:3129
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:3126
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:3116
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:3113
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:3119
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:2398
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:2408
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:2400
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:2578
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:2589
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:2589
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:2588
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:2590
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:2582
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:2590
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:2586
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:2580
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:2589
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:2584
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:2590
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2595
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:2597
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:2596
VmaStatInfo total
Definition: vk_mem_alloc.h:2598
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:2352
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:2362
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:2367
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:2355
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:2359
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:2364
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:2356
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:2363
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:2360
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:2354
PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:2353
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:2366
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:2368
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:2361
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:2357
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:2358
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:2369
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:2365
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size, void *pUserData)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:2208
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
struct VmaAllocatorInfo VmaAllocatorInfo
Information about existing VmaAllocator object.
VkResult vmaEndDefragmentationPass(VmaAllocator allocator, VmaDefragmentationContext context)
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:2029
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
struct VmaStats VmaStats
General statistics from current state of Allocator.
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:3043
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:2384
@ VMA_RECORD_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2392
@ VMA_RECORD_FLUSH_AFTER_CALL_BIT
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:2390
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:2232
@ VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT
Definition: vk_mem_alloc.h:2307
@ VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:2237
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT
Definition: vk_mem_alloc.h:2289
@ VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT
Definition: vk_mem_alloc.h:2325
@ VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT
Definition: vk_mem_alloc.h:2277
@ VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:2262
@ VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2344
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT
Definition: vk_mem_alloc.h:2342
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2876
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
void vmaFreeMemory(VmaAllocator allocator, const VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:3625
@ VMA_DEFRAGMENTATION_FLAG_INCREMENTAL
Definition: vk_mem_alloc.h:3626
@ VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:3627
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
struct VmaDefragmentationPassInfo VmaDefragmentationPassInfo
Parameters for incremental defragmentation steps.
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size, void *pUserData)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:2201
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, const VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:3629
VkResult vmaBindBufferMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkBuffer buffer, const void *pNext)
Binds buffer to allocation with additional parameters.
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2987
@ VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:3022
@ VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:3041
@ VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:3033
@ VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:3005
@ VMA_POOL_CREATE_ALGORITHM_MASK
Definition: vk_mem_alloc.h:3037
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkResult vmaDefragment(VmaAllocator allocator, const VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
struct VmaBudget VmaBudget
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
VmaMemoryUsage
Definition: vk_mem_alloc.h:2700
@ VMA_MEMORY_USAGE_MAX_ENUM
Definition: vk_mem_alloc.h:2763
@ VMA_MEMORY_USAGE_CPU_ONLY
Definition: vk_mem_alloc.h:2731
@ VMA_MEMORY_USAGE_CPU_COPY
Definition: vk_mem_alloc.h:2753
@ VMA_MEMORY_USAGE_GPU_TO_CPU
Definition: vk_mem_alloc.h:2747
@ VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED
Definition: vk_mem_alloc.h:2761
@ VMA_MEMORY_USAGE_CPU_TO_GPU
Definition: vk_mem_alloc.h:2738
@ VMA_MEMORY_USAGE_GPU_ONLY
Definition: vk_mem_alloc.h:2721
@ VMA_MEMORY_USAGE_UNKNOWN
Definition: vk_mem_alloc.h:2704
VkResult vmaBindImageMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkImage image, const void *pNext)
Binds image to allocation with additional parameters.
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VkResult vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
VkResult vmaInvalidateAllocations(VmaAllocator allocator, uint32_t allocationCount, const VmaAllocation *allocations, const VkDeviceSize *offsets, const VkDeviceSize *sizes)
Invalidates memory of given set of allocations.
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
VkResult vmaBeginDefragmentationPass(VmaAllocator allocator, VmaDefragmentationContext context, VmaDefragmentationPassInfo *pInfo)
VkResult vmaFlushAllocations(VmaAllocator allocator, uint32_t allocationCount, const VmaAllocation *allocations, const VkDeviceSize *offsets, const VkDeviceSize *sizes)
Flushes memory of given set of allocations.
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:2346
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
struct VmaDefragmentationPassMoveInfo VmaDefragmentationPassMoveInfo
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2767
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT
Definition: vk_mem_alloc.h:2862
@ VMA_ALLOCATION_CREATE_MAPPED_BIT
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2798
@ VMA_ALLOCATION_CREATE_DONT_BIND_BIT
Definition: vk_mem_alloc.h:2835
@ VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT
Definition: vk_mem_alloc.h:2855
@ VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2774
@ VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT
Definition: vk_mem_alloc.h:2829
@ VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT
Definition: vk_mem_alloc.h:2811
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT
Definition: vk_mem_alloc.h:2865
@ VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT
Definition: vk_mem_alloc.h:2818
@ VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT
Definition: vk_mem_alloc.h:2844
@ VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2785
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT
Definition: vk_mem_alloc.h:2859
@ VMA_ALLOCATION_CREATE_STRATEGY_MASK
Definition: vk_mem_alloc.h:2869
@ VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT
Definition: vk_mem_alloc.h:2824
@ VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT
Definition: vk_mem_alloc.h:2839
@ VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT
Definition: vk_mem_alloc.h:2848
@ VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2874
void vmaSetPoolName(VmaAllocator allocator, VmaPool pool, const char *pName)
Sets name of a custom pool.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
void vmaGetBudget(VmaAllocator allocator, VmaBudget *pBudget)
Retrieves information about current memory budget for all memory heaps.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
void vmaGetPoolName(VmaAllocator allocator, VmaPool pool, const char **ppName)
Retrieves name of a custom pool.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:2394
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
void vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo *pAllocatorInfo)
Returns information about existing VmaAllocator object - handle to Vulkan device etc.