23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
2030 #ifndef VMA_RECORDING_ENABLED
2031 #define VMA_RECORDING_ENABLED 0
2034 #if !defined(NOMINMAX) && defined(VMA_IMPLEMENTATION)
2035 #define NOMINMAX // For windows.h
2038 #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
2039 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
2040 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
2041 extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
2042 extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
2043 extern PFN_vkAllocateMemory vkAllocateMemory;
2044 extern PFN_vkFreeMemory vkFreeMemory;
2045 extern PFN_vkMapMemory vkMapMemory;
2046 extern PFN_vkUnmapMemory vkUnmapMemory;
2047 extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
2048 extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
2049 extern PFN_vkBindBufferMemory vkBindBufferMemory;
2050 extern PFN_vkBindImageMemory vkBindImageMemory;
2051 extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
2052 extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
2053 extern PFN_vkCreateBuffer vkCreateBuffer;
2054 extern PFN_vkDestroyBuffer vkDestroyBuffer;
2055 extern PFN_vkCreateImage vkCreateImage;
2056 extern PFN_vkDestroyImage vkDestroyImage;
2057 extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
2058 #if VMA_VULKAN_VERSION >= 1001000
2059 extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
2060 extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
2061 extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
2062 extern PFN_vkBindImageMemory2 vkBindImageMemory2;
2063 extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
2064 #endif // #if VMA_VULKAN_VERSION >= 1001000
2065 #endif // #if defined(__ANDROID__) && VMA_STATIC_VULKAN_FUNCTIONS && VK_NO_PROTOTYPES
2068 #include <vulkan/vulkan.h>
2074 #if !defined(VMA_VULKAN_VERSION)
2075 #if defined(VK_VERSION_1_2)
2076 #define VMA_VULKAN_VERSION 1002000
2077 #elif defined(VK_VERSION_1_1)
2078 #define VMA_VULKAN_VERSION 1001000
2080 #define VMA_VULKAN_VERSION 1000000
2084 #if !defined(VMA_DEDICATED_ALLOCATION)
2085 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
2086 #define VMA_DEDICATED_ALLOCATION 1
2088 #define VMA_DEDICATED_ALLOCATION 0
2092 #if !defined(VMA_BIND_MEMORY2)
2093 #if VK_KHR_bind_memory2
2094 #define VMA_BIND_MEMORY2 1
2096 #define VMA_BIND_MEMORY2 0
2100 #if !defined(VMA_MEMORY_BUDGET)
2101 #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
2102 #define VMA_MEMORY_BUDGET 1
2104 #define VMA_MEMORY_BUDGET 0
2109 #if !defined(VMA_BUFFER_DEVICE_ADDRESS)
2110 #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
2111 #define VMA_BUFFER_DEVICE_ADDRESS 1
2113 #define VMA_BUFFER_DEVICE_ADDRESS 0
2118 #if !defined(VMA_MEMORY_PRIORITY)
2119 #if VK_EXT_memory_priority
2120 #define VMA_MEMORY_PRIORITY 1
2122 #define VMA_MEMORY_PRIORITY 0
2131 #ifndef VMA_CALL_PRE
2132 #define VMA_CALL_PRE
2134 #ifndef VMA_CALL_POST
2135 #define VMA_CALL_POST
2149 #ifndef VMA_LEN_IF_NOT_NULL
2150 #define VMA_LEN_IF_NOT_NULL(len)
2155 #ifndef VMA_NULLABLE
2157 #define VMA_NULLABLE _Nullable
2159 #define VMA_NULLABLE
2165 #ifndef VMA_NOT_NULL
2167 #define VMA_NOT_NULL _Nonnull
2169 #define VMA_NOT_NULL
2175 #ifndef VMA_NOT_NULL_NON_DISPATCHABLE
2176 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2177 #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
2179 #define VMA_NOT_NULL_NON_DISPATCHABLE
2183 #ifndef VMA_NULLABLE_NON_DISPATCHABLE
2184 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2185 #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
2187 #define VMA_NULLABLE_NON_DISPATCHABLE
2205 uint32_t memoryType,
2206 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2208 void* VMA_NULLABLE pUserData);
2212 uint32_t memoryType,
2213 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2215 void* VMA_NULLABLE pUserData);
2372 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
2373 PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
2374 PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
2376 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
2377 PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
2378 PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
2380 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
2381 PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
2471 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(
"VkPhysicalDeviceMemoryProperties::memoryHeapCount")
pHeapSizeLimit;
2544 const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);
2552 const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);
2562 uint32_t memoryTypeIndex,
2563 VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);
2575 uint32_t frameIndex);
2671 #ifndef VMA_STATS_STRING_ENABLED
2672 #define VMA_STATS_STRING_ENABLED 1
2675 #if VMA_STATS_STRING_ENABLED
2682 char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
2683 VkBool32 detailedMap);
2687 char* VMA_NULLABLE pStatsString);
2689 #endif // #if VMA_STATS_STRING_ENABLED
2948 uint32_t memoryTypeBits,
2950 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2966 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
2968 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2984 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
2986 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
3136 VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);
3164 size_t* VMA_NULLABLE pLostAllocationCount);
3191 const char* VMA_NULLABLE * VMA_NOT_NULL ppName);
3201 const char* VMA_NULLABLE pName);
3295 const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
3321 const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
3323 size_t allocationCount,
3324 VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3325 VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);
3335 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3343 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3368 size_t allocationCount,
3369 const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);
3381 VkDeviceSize newSize);
3438 void* VMA_NULLABLE pUserData);
3495 void* VMA_NULLABLE * VMA_NOT_NULL ppData);
3533 VkDeviceSize offset,
3560 VkDeviceSize offset,
3579 uint32_t allocationCount,
3580 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3581 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3582 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3600 uint32_t allocationCount,
3601 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3602 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3603 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3682 const VmaPool VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(poolCount)
pPools;
3716 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE
memory;
3854 const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3855 size_t allocationCount,
3856 VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
3875 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);
3890 VkDeviceSize allocationLocalOffset,
3891 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3892 const void* VMA_NULLABLE pNext);
3909 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);
3924 VkDeviceSize allocationLocalOffset,
3925 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3926 const void* VMA_NULLABLE pNext);
3960 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
3962 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
3979 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
3985 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
3987 VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
4004 VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
4011 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
4014 #if defined(__cplusplus) && defined(__INTELLISENSE__)
4015 #define VMA_IMPLEMENTATION
4018 #ifdef VMA_IMPLEMENTATION
4019 #undef VMA_IMPLEMENTATION
4026 #if VMA_RECORDING_ENABLED
4029 #include <windows.h>
4049 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
4050 #define VMA_STATIC_VULKAN_FUNCTIONS 1
4059 #if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
4060 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
4061 #if defined(VK_NO_PROTOTYPES)
4062 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
4063 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
4076 #if VMA_USE_STL_CONTAINERS
4077 #define VMA_USE_STL_VECTOR 1
4078 #define VMA_USE_STL_UNORDERED_MAP 1
4079 #define VMA_USE_STL_LIST 1
4082 #ifndef VMA_USE_STL_SHARED_MUTEX
4084 #if __cplusplus >= 201703L
4085 #define VMA_USE_STL_SHARED_MUTEX 1
4089 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
4090 #define VMA_USE_STL_SHARED_MUTEX 1
4092 #define VMA_USE_STL_SHARED_MUTEX 0
4100 #if VMA_USE_STL_VECTOR
4104 #if VMA_USE_STL_UNORDERED_MAP
4105 #include <unordered_map>
4108 #if VMA_USE_STL_LIST
4117 #include <algorithm>
4122 #define VMA_NULL nullptr
4125 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
4127 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4130 if(alignment <
sizeof(
void*))
4132 alignment =
sizeof(
void*);
4135 return memalign(alignment, size);
4137 #elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
4140 #if defined(__APPLE__)
4141 #include <AvailabilityMacros.h>
4144 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4146 #if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0))
4147 #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0
4154 if (__builtin_available(macOS 10.15, iOS 13, *))
4155 return aligned_alloc(alignment, size);
4159 if(alignment <
sizeof(
void*))
4161 alignment =
sizeof(
void*);
4165 if(posix_memalign(&pointer, alignment, size) == 0)
4169 #elif defined(_WIN32)
4170 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4172 return _aligned_malloc(size, alignment);
4175 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4177 return aligned_alloc(alignment, size);
4182 static void vma_aligned_free(
void* ptr)
4187 static void vma_aligned_free(
void* ptr)
4201 #define VMA_ASSERT(expr)
4203 #define VMA_ASSERT(expr) assert(expr)
4209 #ifndef VMA_HEAVY_ASSERT
4211 #define VMA_HEAVY_ASSERT(expr)
4213 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
4217 #ifndef VMA_ALIGN_OF
4218 #define VMA_ALIGN_OF(type) (__alignof(type))
4221 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
4222 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
4225 #ifndef VMA_SYSTEM_ALIGNED_FREE
4227 #if defined(VMA_SYSTEM_FREE)
4228 #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr)
4230 #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr)
4235 #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
4239 #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
4243 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
4247 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
4250 #ifndef VMA_DEBUG_LOG
4251 #define VMA_DEBUG_LOG(format, ...)
4261 #if VMA_STATS_STRING_ENABLED
// Formats `num` as decimal text into outStr (capacity strLen).
// snprintf guarantees NUL-termination when strLen > 0.
static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    const unsigned int value = static_cast<unsigned int>(num);
    snprintf(outStr, strLen, "%u", value);
}
// Formats `num` as decimal text into outStr (capacity strLen).
// The cast to unsigned long long keeps "%llu" portable across platforms
// where uint64_t is not the same type as unsigned long long.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long value = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", value);
}
// Formats pointer `ptr` as implementation-defined text ("%p") into outStr
// of capacity strLen; NUL-terminated by snprintf when strLen > 0.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    const void* const value = ptr;
    snprintf(outStr, strLen, "%p", value);
}
4280 void Lock() { m_Mutex.lock(); }
4281 void Unlock() { m_Mutex.unlock(); }
4282 bool TryLock() {
return m_Mutex.try_lock(); }
4286 #define VMA_MUTEX VmaMutex
4290 #ifndef VMA_RW_MUTEX
4291 #if VMA_USE_STL_SHARED_MUTEX
4293 #include <shared_mutex>
4297 void LockRead() { m_Mutex.lock_shared(); }
4298 void UnlockRead() { m_Mutex.unlock_shared(); }
4299 bool TryLockRead() {
return m_Mutex.try_lock_shared(); }
4300 void LockWrite() { m_Mutex.lock(); }
4301 void UnlockWrite() { m_Mutex.unlock(); }
4302 bool TryLockWrite() {
return m_Mutex.try_lock(); }
4304 std::shared_mutex m_Mutex;
4306 #define VMA_RW_MUTEX VmaRWMutex
4307 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
4313 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
4314 void LockRead() { AcquireSRWLockShared(&m_Lock); }
4315 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
4316 bool TryLockRead() {
return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
4317 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
4318 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
4319 bool TryLockWrite() {
return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
4323 #define VMA_RW_MUTEX VmaRWMutex
4329 void LockRead() { m_Mutex.Lock(); }
4330 void UnlockRead() { m_Mutex.Unlock(); }
4331 bool TryLockRead() {
return m_Mutex.TryLock(); }
4332 void LockWrite() { m_Mutex.Lock(); }
4333 void UnlockWrite() { m_Mutex.Unlock(); }
4334 bool TryLockWrite() {
return m_Mutex.TryLock(); }
4338 #define VMA_RW_MUTEX VmaRWMutex
4339 #endif // #if VMA_USE_STL_SHARED_MUTEX
4340 #endif // #ifndef VMA_RW_MUTEX
4345 #ifndef VMA_ATOMIC_UINT32
4347 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
4350 #ifndef VMA_ATOMIC_UINT64
4352 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
4355 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
4360 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
4363 #ifndef VMA_DEBUG_ALIGNMENT
4368 #define VMA_DEBUG_ALIGNMENT (1)
4371 #ifndef VMA_DEBUG_MARGIN
4376 #define VMA_DEBUG_MARGIN (0)
4379 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
4384 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
4387 #ifndef VMA_DEBUG_DETECT_CORRUPTION
4393 #define VMA_DEBUG_DETECT_CORRUPTION (0)
4396 #ifndef VMA_DEBUG_GLOBAL_MUTEX
4401 #define VMA_DEBUG_GLOBAL_MUTEX (0)
4404 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
4409 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
4412 #ifndef VMA_SMALL_HEAP_MAX_SIZE
4413 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
4417 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
4418 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
4422 #ifndef VMA_CLASS_NO_COPY
4423 #define VMA_CLASS_NO_COPY(className) \
4425 className(const className&) = delete; \
4426 className& operator=(const className&) = delete;
4429 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
4432 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
4434 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
4435 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
4443 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
4444 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
4445 static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
4447 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
4449 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
4450 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count), computed
// with the classic branch-free SWAR reduction: pairwise sums, then nibble,
// byte, and half-word folds.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t count = v - ((v >> 1) & 0x55555555u);
    count = (count & 0x33333333u) + ((count >> 2) & 0x33333333u);
    count = (count + (count >> 4)) & 0x0F0F0F0Fu;
    count = (count + (count >> 8)) & 0x00FF00FFu;
    count = (count + (count >> 16)) & 0x0000FFFFu;
    return count;
}
// True when `x` has at most one bit set. Note that this deliberately
// reports true for x == 0 as well; callers that require a nonzero power of
// two must check separately.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T lowerBits = static_cast<T>(x - 1);
    return (x & lowerBits) == 0;
}
// Rounds `val` up to the nearest multiple of `alignment`.
// `alignment` must be a power of two (checked by VMA_HEAVY_ASSERT in
// heavy-assert builds); the bit trick below is only valid in that case.
template <typename T>
static inline T VmaAlignUp(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    const T mask = alignment - 1;
    return (val + mask) & ~mask;
}
// Rounds `val` down to the nearest multiple of `alignment`.
// `alignment` must be a power of two (checked by VMA_HEAVY_ASSERT in
// heavy-assert builds).
template <typename T>
static inline T VmaAlignDown(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    const T mask = alignment - 1;
    return val & ~mask;
}
// Integer division of x by y with rounding to the nearest integer:
// adds half of the divisor before the truncating division.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Presumably rounds v up to the next power of two (32-bit overload).
// NOTE(review): the function bodies are not visible in this chunk — confirm
// behavior (and treatment of v == 0) against the upstream source.
4499 static inline uint32_t VmaNextPow2(uint32_t v)
// 64-bit overload of VmaNextPow2. Body not visible in this chunk.
4510 static inline uint64_t VmaNextPow2(uint64_t v)
// Presumably rounds v down to the previous power of two (32-bit overload).
// Body not visible in this chunk.
4524 static inline uint32_t VmaPrevPow2(uint32_t v)
// 64-bit overload of VmaPrevPow2. Body not visible in this chunk.
4534 static inline uint64_t VmaPrevPow2(uint64_t v)
4546 static inline bool VmaStrIsEmpty(
const char* pStr)
4548 return pStr == VMA_NULL || *pStr ==
'\0';
4551 #if VMA_STATS_STRING_ENABLED
4553 static const char* VmaAlgorithmToStr(uint32_t algorithm)
4569 #endif // #if VMA_STATS_STRING_ENABLED
// Lomuto-style partition step for VmaQuickSort: uses the last element of
// [beg, end) as the pivot and moves all elements ordered before it (per cmp)
// to the front.
// NOTE(review): the ++insertIndex statement and the final `return insertIndex;`
// are not visible in this chunk of the file.
4573 template<
typename Iterator,
typename Compare>
4574 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
4576 Iterator centerValue = end; --centerValue;
4577 Iterator insertIndex = beg;
4578 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
4580 if(cmp(*memTypeIndex, *centerValue))
4582 if(insertIndex != memTypeIndex)
4584 VMA_SWAP(*memTypeIndex, *insertIndex);
// After the loop, the pivot is swapped into its final position.
4589 if(insertIndex != centerValue)
4591 VMA_SWAP(*insertIndex, *centerValue);
// Recursive quicksort over [beg, end) using VmaQuickSortPartition; backs the
// VMA_SORT macro when the STL sort is not used.
// NOTE(review): the range-length check that terminates the recursion is not
// visible in this chunk of the file.
4596 template<
typename Iterator,
typename Compare>
4597 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
4601 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
4602 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
4603 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
4607 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
4609 #endif // #ifndef VMA_SORT
4618 static inline bool VmaBlocksOnSamePage(
4619 VkDeviceSize resourceAOffset,
4620 VkDeviceSize resourceASize,
4621 VkDeviceSize resourceBOffset,
4622 VkDeviceSize pageSize)
4624 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
4625 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
4626 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
4627 VkDeviceSize resourceBStart = resourceBOffset;
4628 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
4629 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation. The relative order of these
// values matters: VmaIsBufferImageGranularityConflict sorts its two arguments
// by this ordering before comparing them.
4632 enum VmaSuballocationType
4634 VMA_SUBALLOCATION_TYPE_FREE = 0,
4635 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
4636 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
4637 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
4638 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
4639 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
// 0x7FFFFFFF pins the underlying type to at least 32 bits, matching the
// Vulkan enum convention.
4640 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
// Decides whether two suballocation types may conflict when placed within the
// same bufferImageGranularity page. The pair is first normalized so that
// suballocType1 <= suballocType2, then dispatched on the smaller type.
// NOTE(review): the `return` keywords / values of the switch cases are not
// visible in this chunk — the expressions below are the tail ends of those
// return statements; confirm against the upstream source.
4649 static inline bool VmaIsBufferImageGranularityConflict(
VmaSuballocationType suballocType1,
VmaSuballocationType suballocType2)
// Normalize order so each pair is handled once.
4653 if(suballocType1 > suballocType2)
4655 VMA_SWAP(suballocType1, suballocType2);
4658 switch(suballocType1)
4660 case VMA_SUBALLOCATION_TYPE_FREE:
4662 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
4664 case VMA_SUBALLOCATION_TYPE_BUFFER:
4666 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4667 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4668 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
4670 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4671 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
4672 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4673 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
4675 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4676 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
// Writes the corruption-detection magic value into the VMA_DEBUG_MARGIN bytes
// starting at pData + offset. Compiles to a no-op unless both a nonzero debug
// margin and corruption detection are enabled.
// NOTE(review): the loop's closing lines and the matching #else/#endif are
// not visible in this chunk of the file.
4684 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
4686 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4687 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
4688 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4689 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
4691 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
// Verifies that the VMA_DEBUG_MARGIN bytes at pData + offset still contain
// the magic value written by VmaWriteMagicValue (i.e. no buffer overrun
// corrupted the margin). Only active when both a nonzero debug margin and
// corruption detection are enabled.
// NOTE(review): the return statements and the #else/#endif lines are not
// visible in this chunk of the file.
4698 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
4700 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4701 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
4702 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4703 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
4705 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
4718 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
4720 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
4721 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4722 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4723 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
4729 VMA_CLASS_NO_COPY(VmaMutexLock)
4731 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
4732 m_pMutex(useMutex ? &mutex : VMA_NULL)
4733 {
if(m_pMutex) { m_pMutex->Lock(); } }
4735 {
if(m_pMutex) { m_pMutex->Unlock(); } }
4737 VMA_MUTEX* m_pMutex;
4741 struct VmaMutexLockRead
4743 VMA_CLASS_NO_COPY(VmaMutexLockRead)
4745 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
4746 m_pMutex(useMutex ? &mutex : VMA_NULL)
4747 {
if(m_pMutex) { m_pMutex->LockRead(); } }
4748 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
4750 VMA_RW_MUTEX* m_pMutex;
4754 struct VmaMutexLockWrite
4756 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
4758 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
4759 m_pMutex(useMutex ? &mutex : VMA_NULL)
4760 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
4761 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
4763 VMA_RW_MUTEX* m_pMutex;
4766 #if VMA_DEBUG_GLOBAL_MUTEX
4767 static VMA_MUTEX gDebugGlobalMutex;
4768 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
4770 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
4774 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns the first iterator
// whose element is NOT less than `key` under `cmp` (std::lower_bound
// semantics), or `end` if no such element exists.
// NOTE(review): the branch bodies that update `down`/`up`, the loop header,
// and the final return are not visible in this chunk of the file.
4785 template <
typename CmpLess,
typename IterT,
typename KeyT>
4786 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key,
const CmpLess& cmp)
4788 size_t down = 0, up = (end - beg);
// Midpoint computed as down + (up - down) / 2 to avoid overflow.
4791 const size_t mid = down + (up - down) / 2;
4792 if(cmp(*(beg+mid), key))
// Binary search for an element equivalent to `value` in the sorted range
// [beg, end): first locates the lower bound, then checks equivalence via
// double application of cmp (neither element orders before the other).
// NOTE(review): the return statements and part of the condition are not
// visible in this chunk of the file.
4804 template<
typename CmpLess,
typename IterT,
typename KeyT>
4805 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
4807 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4808 beg, end, value, cmp);
4810 (!cmp(*it, value) && !cmp(value, *it)))
// Validates an array of `count` pointer-like values: each entry must be
// non-null and all entries pairwise distinct.
// NOTE(review): the inner comparison of arr[i] vs arr[j] and the return
// statements are not visible in this chunk of the file.
4822 template<
typename T>
4823 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
4825 for(uint32_t i = 0; i < count; ++i)
4827 const T iPtr = arr[i];
4828 if(iPtr == VMA_NULL)
4832 for(uint32_t j = i + 1; j < count; ++j)
// Prepends `newStruct` to the pNext chain rooted at `mainStruct` — a
// push-front on the singly-linked list formed by Vulkan-style structs'
// pNext members.
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    // Splice: the new node points at the old head, then becomes the head.
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
// Allocates `size` bytes aligned to `alignment`: routes through the
// user-supplied VkAllocationCallbacks::pfnAllocation when provided,
// otherwise falls back to VMA_SYSTEM_ALIGNED_MALLOC. Asserts (in debug) that
// the allocation succeeded.
// NOTE(review): the size/alignment arguments of the callback invocation and
// the else-branch lines are not visible in this chunk of the file.
4853 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
4855 void* result = VMA_NULL;
4856 if((pAllocationCallbacks != VMA_NULL) &&
4857 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
4859 result = (*pAllocationCallbacks->pfnAllocation)(
4860 pAllocationCallbacks->pUserData,
4863 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
4867 result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
4869 VMA_ASSERT(result != VMA_NULL &&
"CPU memory allocation failed.");
// Frees memory obtained from VmaMalloc: uses the user-supplied
// VkAllocationCallbacks::pfnFree when provided, otherwise
// VMA_SYSTEM_ALIGNED_FREE — the two paths must mirror VmaMalloc's choice.
// NOTE(review): the `else` line between the two branches is not visible in
// this chunk of the file.
4873 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
4875 if((pAllocationCallbacks != VMA_NULL) &&
4876 (pAllocationCallbacks->pfnFree != VMA_NULL))
4878 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
4882 VMA_SYSTEM_ALIGNED_FREE(ptr);
// Allocates raw, suitably-aligned storage for a single T via VmaMalloc.
// No constructor is run — pair with the vma_new macro for placement-new
// construction.
4886 template<
typename T>
4887 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
4889 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
// Allocates raw, suitably-aligned storage for `count` objects of type T via
// VmaMalloc. No constructors are run — pair with the vma_new_array macro.
// NOTE(review): sizeof(T) * count is not checked for overflow here; callers
// are trusted to pass sane counts.
4892 template<
typename T>
4893 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
4895 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
4898 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
4900 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
4902 template<
typename T>
4903 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
4906 VmaFree(pAllocationCallbacks, ptr);
4909 template<
typename T>
4910 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
4914 for(
size_t i = count; i--; )
4918 VmaFree(pAllocationCallbacks, ptr);
// Returns a heap copy of srcStr (including the terminating NUL) allocated
// through `allocs`; the caller releases it with VmaFreeString.
// NOTE(review): the return statements are not visible in this chunk —
// presumably the copy is returned, and VMA_NULL for a null input; confirm
// against the upstream source.
4922 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr)
4924 if(srcStr != VMA_NULL)
4926 const size_t len = strlen(srcStr);
4927 char*
const result = vma_new_array(allocs,
char, len + 1);
// len + 1 copies the NUL terminator along with the characters.
4928 memcpy(result, srcStr, len + 1);
// Releases a string created by VmaCreateStringCopy. The array length passed
// to vma_delete_array is strlen + 1 to match the original allocation size.
// NOTE(review): the null-check guarding this body is not visible in this
// chunk of the file.
4937 static void VmaFreeString(
const VkAllocationCallbacks* allocs,
char* str)
4941 const size_t len = strlen(str);
4942 vma_delete_array(allocs, str, len + 1);
4947 template<
typename T>
4948 class VmaStlAllocator
4951 const VkAllocationCallbacks*
const m_pCallbacks;
4952 typedef T value_type;
4954 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
4955 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
4957 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
4958 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
4960 template<
typename U>
4961 bool operator==(
const VmaStlAllocator<U>& rhs)
const
4963 return m_pCallbacks == rhs.m_pCallbacks;
4965 template<
typename U>
4966 bool operator!=(
const VmaStlAllocator<U>& rhs)
const
4968 return m_pCallbacks != rhs.m_pCallbacks;
4971 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
4974 #if VMA_USE_STL_VECTOR
4976 #define VmaVector std::vector
// Inserts `item` into `vec` before position `index` (std::vector flavor of
// the VmaVector helper API).
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    const typename std::vector<T, allocatorT>::iterator pos = vec.begin() + index;
    vec.insert(pos, item);
}
// Erases the element at position `index` from `vec` (std::vector flavor of
// the VmaVector helper API).
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    const typename std::vector<T, allocatorT>::iterator pos = vec.begin() + index;
    vec.erase(pos);
}
4990 #else // #if VMA_USE_STL_VECTOR
4995 template<
typename T,
typename AllocatorT>
4999 typedef T value_type;
5001 VmaVector(
const AllocatorT& allocator) :
5002 m_Allocator(allocator),
5009 VmaVector(
size_t count,
const AllocatorT& allocator) :
5010 m_Allocator(allocator),
5011 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
5019 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator)
5020 : VmaVector(count, allocator) {}
5022 VmaVector(
const VmaVector<T, AllocatorT>& src) :
5023 m_Allocator(src.m_Allocator),
5024 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
5025 m_Count(src.m_Count),
5026 m_Capacity(src.m_Count)
5030 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
5036 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5039 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
5043 resize(rhs.m_Count);
5046 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
5052 bool empty()
const {
return m_Count == 0; }
5053 size_t size()
const {
return m_Count; }
5054 T* data() {
return m_pArray; }
5055 const T* data()
const {
return m_pArray; }
5057 T& operator[](
size_t index)
5059 VMA_HEAVY_ASSERT(index < m_Count);
5060 return m_pArray[index];
5062 const T& operator[](
size_t index)
const
5064 VMA_HEAVY_ASSERT(index < m_Count);
5065 return m_pArray[index];
5070 VMA_HEAVY_ASSERT(m_Count > 0);
5073 const T& front()
const
5075 VMA_HEAVY_ASSERT(m_Count > 0);
5080 VMA_HEAVY_ASSERT(m_Count > 0);
5081 return m_pArray[m_Count - 1];
5083 const T& back()
const
5085 VMA_HEAVY_ASSERT(m_Count > 0);
5086 return m_pArray[m_Count - 1];
5089 void reserve(
size_t newCapacity,
bool freeMemory =
false)
5091 newCapacity = VMA_MAX(newCapacity, m_Count);
5093 if((newCapacity < m_Capacity) && !freeMemory)
5095 newCapacity = m_Capacity;
5098 if(newCapacity != m_Capacity)
5100 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
5103 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5105 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5106 m_Capacity = newCapacity;
5107 m_pArray = newArray;
5111 void resize(
size_t newCount,
bool freeMemory =
false)
5113 size_t newCapacity = m_Capacity;
5114 if(newCount > m_Capacity)
5116 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
5120 newCapacity = newCount;
5123 if(newCapacity != m_Capacity)
5125 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
5126 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
5127 if(elementsToCopy != 0)
5129 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
5131 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5132 m_Capacity = newCapacity;
5133 m_pArray = newArray;
5139 void clear(
bool freeMemory =
false)
5141 resize(0, freeMemory);
5144 void insert(
size_t index,
const T& src)
5146 VMA_HEAVY_ASSERT(index <= m_Count);
5147 const size_t oldCount = size();
5148 resize(oldCount + 1);
5149 if(index < oldCount)
5151 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
5153 m_pArray[index] = src;
5156 void remove(
size_t index)
5158 VMA_HEAVY_ASSERT(index < m_Count);
5159 const size_t oldCount = size();
5160 if(index < oldCount - 1)
5162 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
5164 resize(oldCount - 1);
5167 void push_back(
const T& src)
5169 const size_t newIndex = size();
5170 resize(newIndex + 1);
5171 m_pArray[newIndex] = src;
5176 VMA_HEAVY_ASSERT(m_Count > 0);
5180 void push_front(
const T& src)
5187 VMA_HEAVY_ASSERT(m_Count > 0);
5191 typedef T* iterator;
5193 iterator begin() {
return m_pArray; }
5194 iterator end() {
return m_pArray + m_Count; }
5197 AllocatorT m_Allocator;
5203 template<
typename T,
typename allocatorT>
5204 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
5206 vec.insert(index, item);
5209 template<
typename T,
typename allocatorT>
5210 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
5215 #endif // #if VMA_USE_STL_VECTOR
5217 template<
typename CmpLess,
typename VectorT>
5218 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
5220 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
5222 vector.data() + vector.size(),
5224 CmpLess()) - vector.data();
5225 VmaVectorInsert(vector, indexToInsert, value);
5226 return indexToInsert;
5229 template<
typename CmpLess,
typename VectorT>
5230 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
5233 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
5238 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
5240 size_t indexToRemove = it - vector.begin();
5241 VmaVectorRemove(vector, indexToRemove);
5258 template<
typename T,
typename AllocatorT,
size_t N>
5259 class VmaSmallVector
5262 typedef T value_type;
5264 VmaSmallVector(
const AllocatorT& allocator) :
5266 m_DynamicArray(allocator)
5269 VmaSmallVector(
size_t count,
const AllocatorT& allocator) :
5271 m_DynamicArray(count > N ? count : 0, allocator)
5274 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5275 VmaSmallVector(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) =
delete;
5276 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5277 VmaSmallVector<T, AllocatorT, N>& operator=(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) =
delete;
5279 bool empty()
const {
return m_Count == 0; }
5280 size_t size()
const {
return m_Count; }
5281 T* data() {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5282 const T* data()
const {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5284 T& operator[](
size_t index)
5286 VMA_HEAVY_ASSERT(index < m_Count);
5287 return data()[index];
5289 const T& operator[](
size_t index)
const
5291 VMA_HEAVY_ASSERT(index < m_Count);
5292 return data()[index];
5297 VMA_HEAVY_ASSERT(m_Count > 0);
5300 const T& front()
const
5302 VMA_HEAVY_ASSERT(m_Count > 0);
5307 VMA_HEAVY_ASSERT(m_Count > 0);
5308 return data()[m_Count - 1];
5310 const T& back()
const
5312 VMA_HEAVY_ASSERT(m_Count > 0);
5313 return data()[m_Count - 1];
5316 void resize(
size_t newCount,
bool freeMemory =
false)
5318 if(newCount > N && m_Count > N)
5321 m_DynamicArray.resize(newCount, freeMemory);
5323 else if(newCount > N && m_Count <= N)
5326 m_DynamicArray.resize(newCount, freeMemory);
5329 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count *
sizeof(T));
5332 else if(newCount <= N && m_Count > N)
5337 memcpy(m_StaticArray, m_DynamicArray.data(), newCount *
sizeof(T));
5339 m_DynamicArray.resize(0, freeMemory);
5348 void clear(
bool freeMemory =
false)
5350 m_DynamicArray.clear(freeMemory);
5354 void insert(
size_t index,
const T& src)
5356 VMA_HEAVY_ASSERT(index <= m_Count);
5357 const size_t oldCount = size();
5358 resize(oldCount + 1);
5359 T*
const dataPtr = data();
5360 if(index < oldCount)
5363 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) *
sizeof(T));
5365 dataPtr[index] = src;
5368 void remove(
size_t index)
5370 VMA_HEAVY_ASSERT(index < m_Count);
5371 const size_t oldCount = size();
5372 if(index < oldCount - 1)
5375 T*
const dataPtr = data();
5376 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) *
sizeof(T));
5378 resize(oldCount - 1);
5381 void push_back(
const T& src)
5383 const size_t newIndex = size();
5384 resize(newIndex + 1);
5385 data()[newIndex] = src;
5390 VMA_HEAVY_ASSERT(m_Count > 0);
5394 void push_front(
const T& src)
5401 VMA_HEAVY_ASSERT(m_Count > 0);
5405 typedef T* iterator;
5407 iterator begin() {
return data(); }
5408 iterator end() {
return data() + m_Count; }
5413 VmaVector<T, AllocatorT> m_DynamicArray;
5424 template<
typename T>
5425 class VmaPoolAllocator
5427 VMA_CLASS_NO_COPY(VmaPoolAllocator)
5429 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
5430 ~VmaPoolAllocator();
5431 template<
typename... Types> T* Alloc(Types... args);
5437 uint32_t NextFreeIndex;
5438 alignas(T)
char Value[
sizeof(T)];
5445 uint32_t FirstFreeIndex;
5448 const VkAllocationCallbacks* m_pAllocationCallbacks;
5449 const uint32_t m_FirstBlockCapacity;
5450 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
5452 ItemBlock& CreateNewBlock();
5455 template<
typename T>
5456 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
5457 m_pAllocationCallbacks(pAllocationCallbacks),
5458 m_FirstBlockCapacity(firstBlockCapacity),
5459 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
5461 VMA_ASSERT(m_FirstBlockCapacity > 1);
5464 template<
typename T>
5465 VmaPoolAllocator<T>::~VmaPoolAllocator()
5467 for(
size_t i = m_ItemBlocks.size(); i--; )
5468 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
5469 m_ItemBlocks.clear();
5472 template<
typename T>
5473 template<
typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
5475 for(
size_t i = m_ItemBlocks.size(); i--; )
5477 ItemBlock& block = m_ItemBlocks[i];
5479 if(block.FirstFreeIndex != UINT32_MAX)
5481 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
5482 block.FirstFreeIndex = pItem->NextFreeIndex;
5483 T* result = (T*)&pItem->Value;
5484 new(result)T(std::forward<Types>(args)...);
5490 ItemBlock& newBlock = CreateNewBlock();
5491 Item*
const pItem = &newBlock.pItems[0];
5492 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
5493 T* result = (T*)&pItem->Value;
5494 new(result)T(std::forward<Types>(args)...);
5498 template<
typename T>
5499 void VmaPoolAllocator<T>::Free(T* ptr)
5502 for(
size_t i = m_ItemBlocks.size(); i--; )
5504 ItemBlock& block = m_ItemBlocks[i];
5508 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
5511 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
5514 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
5515 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
5516 block.FirstFreeIndex = index;
5520 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
5523 template<
typename T>
5524 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
5526 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
5527 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
5529 const ItemBlock newBlock = {
5530 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
5534 m_ItemBlocks.push_back(newBlock);
5537 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
5538 newBlock.pItems[i].NextFreeIndex = i + 1;
5539 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
5540 return m_ItemBlocks.back();
5546 #if VMA_USE_STL_LIST
5548 #define VmaList std::list
5550 #else // #if VMA_USE_STL_LIST
5552 template<
typename T>
5561 template<
typename T>
5564 VMA_CLASS_NO_COPY(VmaRawList)
5566 typedef VmaListItem<T> ItemType;
5568 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
5572 size_t GetCount()
const {
return m_Count; }
5573 bool IsEmpty()
const {
return m_Count == 0; }
5575 ItemType* Front() {
return m_pFront; }
5576 const ItemType* Front()
const {
return m_pFront; }
5577 ItemType* Back() {
return m_pBack; }
5578 const ItemType* Back()
const {
return m_pBack; }
5580 ItemType* PushBack();
5581 ItemType* PushFront();
5582 ItemType* PushBack(
const T& value);
5583 ItemType* PushFront(
const T& value);
5588 ItemType* InsertBefore(ItemType* pItem);
5590 ItemType* InsertAfter(ItemType* pItem);
5592 ItemType* InsertBefore(ItemType* pItem,
const T& value);
5593 ItemType* InsertAfter(ItemType* pItem,
const T& value);
5595 void Remove(ItemType* pItem);
5598 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
5599 VmaPoolAllocator<ItemType> m_ItemAllocator;
5605 template<
typename T>
5606 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
5607 m_pAllocationCallbacks(pAllocationCallbacks),
5608 m_ItemAllocator(pAllocationCallbacks, 128),
5615 template<
typename T>
5616 VmaRawList<T>::~VmaRawList()
5622 template<
typename T>
5623 void VmaRawList<T>::Clear()
5625 if(IsEmpty() ==
false)
5627 ItemType* pItem = m_pBack;
5628 while(pItem != VMA_NULL)
5630 ItemType*
const pPrevItem = pItem->pPrev;
5631 m_ItemAllocator.Free(pItem);
5634 m_pFront = VMA_NULL;
5640 template<
typename T>
5641 VmaListItem<T>* VmaRawList<T>::PushBack()
5643 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5644 pNewItem->pNext = VMA_NULL;
5647 pNewItem->pPrev = VMA_NULL;
5648 m_pFront = pNewItem;
5654 pNewItem->pPrev = m_pBack;
5655 m_pBack->pNext = pNewItem;
5662 template<
typename T>
5663 VmaListItem<T>* VmaRawList<T>::PushFront()
5665 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5666 pNewItem->pPrev = VMA_NULL;
5669 pNewItem->pNext = VMA_NULL;
5670 m_pFront = pNewItem;
5676 pNewItem->pNext = m_pFront;
5677 m_pFront->pPrev = pNewItem;
5678 m_pFront = pNewItem;
5684 template<
typename T>
5685 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
5687 ItemType*
const pNewItem = PushBack();
5688 pNewItem->Value = value;
5692 template<
typename T>
5693 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
5695 ItemType*
const pNewItem = PushFront();
5696 pNewItem->Value = value;
5700 template<
typename T>
5701 void VmaRawList<T>::PopBack()
5703 VMA_HEAVY_ASSERT(m_Count > 0);
5704 ItemType*
const pBackItem = m_pBack;
5705 ItemType*
const pPrevItem = pBackItem->pPrev;
5706 if(pPrevItem != VMA_NULL)
5708 pPrevItem->pNext = VMA_NULL;
5710 m_pBack = pPrevItem;
5711 m_ItemAllocator.Free(pBackItem);
5715 template<
typename T>
5716 void VmaRawList<T>::PopFront()
5718 VMA_HEAVY_ASSERT(m_Count > 0);
5719 ItemType*
const pFrontItem = m_pFront;
5720 ItemType*
const pNextItem = pFrontItem->pNext;
5721 if(pNextItem != VMA_NULL)
5723 pNextItem->pPrev = VMA_NULL;
5725 m_pFront = pNextItem;
5726 m_ItemAllocator.Free(pFrontItem);
5730 template<
typename T>
5731 void VmaRawList<T>::Remove(ItemType* pItem)
5733 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
5734 VMA_HEAVY_ASSERT(m_Count > 0);
5736 if(pItem->pPrev != VMA_NULL)
5738 pItem->pPrev->pNext = pItem->pNext;
5742 VMA_HEAVY_ASSERT(m_pFront == pItem);
5743 m_pFront = pItem->pNext;
5746 if(pItem->pNext != VMA_NULL)
5748 pItem->pNext->pPrev = pItem->pPrev;
5752 VMA_HEAVY_ASSERT(m_pBack == pItem);
5753 m_pBack = pItem->pPrev;
5756 m_ItemAllocator.Free(pItem);
5760 template<
typename T>
5761 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
5763 if(pItem != VMA_NULL)
5765 ItemType*
const prevItem = pItem->pPrev;
5766 ItemType*
const newItem = m_ItemAllocator.Alloc();
5767 newItem->pPrev = prevItem;
5768 newItem->pNext = pItem;
5769 pItem->pPrev = newItem;
5770 if(prevItem != VMA_NULL)
5772 prevItem->pNext = newItem;
5776 VMA_HEAVY_ASSERT(m_pFront == pItem);
5786 template<
typename T>
5787 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
5789 if(pItem != VMA_NULL)
5791 ItemType*
const nextItem = pItem->pNext;
5792 ItemType*
const newItem = m_ItemAllocator.Alloc();
5793 newItem->pNext = nextItem;
5794 newItem->pPrev = pItem;
5795 pItem->pNext = newItem;
5796 if(nextItem != VMA_NULL)
5798 nextItem->pPrev = newItem;
5802 VMA_HEAVY_ASSERT(m_pBack == pItem);
5812 template<
typename T>
5813 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
5815 ItemType*
const newItem = InsertBefore(pItem);
5816 newItem->Value = value;
5820 template<
typename T>
5821 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
5823 ItemType*
const newItem = InsertAfter(pItem);
5824 newItem->Value = value;
5828 template<
typename T,
typename AllocatorT>
5831 VMA_CLASS_NO_COPY(VmaList)
5842 T& operator*()
const
5844 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5845 return m_pItem->Value;
5847 T* operator->()
const
5849 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5850 return &m_pItem->Value;
5853 iterator& operator++()
5855 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5856 m_pItem = m_pItem->pNext;
5859 iterator& operator--()
5861 if(m_pItem != VMA_NULL)
5863 m_pItem = m_pItem->pPrev;
5867 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5868 m_pItem = m_pList->Back();
5873 iterator operator++(
int)
5875 iterator result = *
this;
5879 iterator operator--(
int)
5881 iterator result = *
this;
5886 bool operator==(
const iterator& rhs)
const
5888 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5889 return m_pItem == rhs.m_pItem;
5891 bool operator!=(
const iterator& rhs)
const
5893 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5894 return m_pItem != rhs.m_pItem;
5898 VmaRawList<T>* m_pList;
5899 VmaListItem<T>* m_pItem;
5901 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
5907 friend class VmaList<T, AllocatorT>;
5910 class const_iterator
5919 const_iterator(
const iterator& src) :
5920 m_pList(src.m_pList),
5921 m_pItem(src.m_pItem)
5925 const T& operator*()
const
5927 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5928 return m_pItem->Value;
5930 const T* operator->()
const
5932 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5933 return &m_pItem->Value;
5936 const_iterator& operator++()
5938 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5939 m_pItem = m_pItem->pNext;
5942 const_iterator& operator--()
5944 if(m_pItem != VMA_NULL)
5946 m_pItem = m_pItem->pPrev;
5950 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5951 m_pItem = m_pList->Back();
5956 const_iterator operator++(
int)
5958 const_iterator result = *
this;
5962 const_iterator operator--(
int)
5964 const_iterator result = *
this;
5969 bool operator==(
const const_iterator& rhs)
const
5971 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5972 return m_pItem == rhs.m_pItem;
5974 bool operator!=(
const const_iterator& rhs)
const
5976 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5977 return m_pItem != rhs.m_pItem;
5981 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
5987 const VmaRawList<T>* m_pList;
5988 const VmaListItem<T>* m_pItem;
5990 friend class VmaList<T, AllocatorT>;
5993 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
5995 bool empty()
const {
return m_RawList.IsEmpty(); }
5996 size_t size()
const {
return m_RawList.GetCount(); }
5998 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
5999 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
6001 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
6002 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
6004 void clear() { m_RawList.Clear(); }
6005 void push_back(
const T& value) { m_RawList.PushBack(value); }
6006 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
6007 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
6010 VmaRawList<T> m_RawList;
6013 #endif // #if VMA_USE_STL_LIST
6021 #if VMA_USE_STL_UNORDERED_MAP
6023 #define VmaPair std::pair
6025 #define VMA_MAP_TYPE(KeyT, ValueT) \
6026 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
6028 #else // #if VMA_USE_STL_UNORDERED_MAP
6030 template<
typename T1,
typename T2>
6036 VmaPair() : first(), second() { }
6037 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
6043 template<
typename KeyT,
typename ValueT>
6047 typedef VmaPair<KeyT, ValueT> PairType;
6048 typedef PairType* iterator;
6050 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
6052 iterator begin() {
return m_Vector.begin(); }
6053 iterator end() {
return m_Vector.end(); }
6055 void insert(
const PairType& pair);
6056 iterator find(
const KeyT& key);
6057 void erase(iterator it);
6060 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
6063 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
6065 template<
typename FirstT,
typename SecondT>
6066 struct VmaPairFirstLess
6068 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const
6070 return lhs.first < rhs.first;
6072 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const
6074 return lhs.first < rhsFirst;
6078 template<
typename KeyT,
typename ValueT>
6079 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
6081 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
6083 m_Vector.data() + m_Vector.size(),
6085 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
6086 VmaVectorInsert(m_Vector, indexToInsert, pair);
6089 template<
typename KeyT,
typename ValueT>
6090 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
6092 PairType* it = VmaBinaryFindFirstNotLess(
6094 m_Vector.data() + m_Vector.size(),
6096 VmaPairFirstLess<KeyT, ValueT>());
6097 if((it != m_Vector.end()) && (it->first == key))
6103 return m_Vector.end();
6107 template<
typename KeyT,
typename ValueT>
6108 void VmaMap<KeyT, ValueT>::erase(iterator it)
6110 VmaVectorRemove(m_Vector, it - m_Vector.begin());
6113 #endif // #if VMA_USE_STL_UNORDERED_MAP
6119 class VmaDeviceMemoryBlock;
6121 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
/*
Internal representation of a single VmaAllocation handle. An allocation is
either carved out of a VmaDeviceMemoryBlock (ALLOCATION_TYPE_BLOCK) or owns a
dedicated VkDeviceMemory (ALLOCATION_TYPE_DEDICATED); the two cases share
storage (see BlockAllocation / DedicatedAllocation below).
NOTE(review): this chunk is extraction-garbled — braces, access specifiers and
several member/parameter lines are missing; verify against the upstream file.
*/
6123 struct VmaAllocation_T
// High bit of m_MapCount marks a persistently mapped allocation.
6126 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
6130 FLAG_USER_DATA_STRING = 0x01,
6134 enum ALLOCATION_TYPE
6136 ALLOCATION_TYPE_NONE,
6137 ALLOCATION_TYPE_BLOCK,
6138 ALLOCATION_TYPE_DEDICATED,
// Constructor: starts in ALLOCATION_TYPE_NONE state; one of the Init*
// methods below must be called before the allocation is used.
6145 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
6148 m_pUserData{VMA_NULL},
6149 m_LastUseFrameIndex{currentFrameIndex},
6150 m_MemoryTypeIndex{0},
6151 m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
6152 m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
6154 m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
6156 #if VMA_STATS_STRING_ENABLED
6157 m_CreationFrameIndex = currentFrameIndex;
6158 m_BufferImageUsage = 0;
// Destructor checks: allocation must be fully unmapped and user data cleared.
6164 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
6167 VMA_ASSERT(m_pUserData == VMA_NULL);
// Initializes this object as a suballocation inside a memory block.
6170 void InitBlockAllocation(
6171 VmaDeviceMemoryBlock* block,
6172 VkDeviceSize offset,
6173 VkDeviceSize alignment,
6175 uint32_t memoryTypeIndex,
6176 VmaSuballocationType suballocationType,
6180 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6181 VMA_ASSERT(block != VMA_NULL);
6182 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6183 m_Alignment = alignment;
6185 m_MemoryTypeIndex = memoryTypeIndex;
6186 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6187 m_SuballocationType = (uint8_t)suballocationType;
6188 m_BlockAllocation.m_Block = block;
6189 m_BlockAllocation.m_Offset = offset;
6190 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Initializes this object as a "lost" block allocation (no backing block).
6195 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6196 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
6197 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6198 m_MemoryTypeIndex = 0;
6199 m_BlockAllocation.m_Block = VMA_NULL;
6200 m_BlockAllocation.m_Offset = 0;
6201 m_BlockAllocation.m_CanBecomeLost =
true;
// Used by defragmentation to move the allocation to another block/offset.
6204 void ChangeBlockAllocation(
6206 VmaDeviceMemoryBlock* block,
6207 VkDeviceSize offset);
6209 void ChangeOffset(VkDeviceSize newOffset);
// Initializes this object as a dedicated VkDeviceMemory allocation.
6212 void InitDedicatedAllocation(
6213 uint32_t memoryTypeIndex,
6214 VkDeviceMemory hMemory,
6215 VmaSuballocationType suballocationType,
6219 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6220 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
6221 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
6224 m_MemoryTypeIndex = memoryTypeIndex;
6225 m_SuballocationType = (uint8_t)suballocationType;
6226 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6227 m_DedicatedAllocation.m_hMemory = hMemory;
6228 m_DedicatedAllocation.m_pMappedData = pMappedData;
// Simple accessors.
6231 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
6232 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
6233 VkDeviceSize GetSize()
const {
return m_Size; }
6234 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
6235 void* GetUserData()
const {
return m_pUserData; }
6236 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
6237 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Valid only for block allocations.
6239 VmaDeviceMemoryBlock* GetBlock()
const
6241 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6242 return m_BlockAllocation.m_Block;
6244 VkDeviceSize GetOffset()
const;
6245 VkDeviceMemory GetMemory()
const;
6246 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
6247 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
6248 void* GetMappedData()
const;
6249 bool CanBecomeLost()
const;
// Lost-allocation frame tracking uses an atomic so that touch/make-lost can
// race between threads without a mutex.
6251 uint32_t GetLastUseFrameIndex()
const
6253 return m_LastUseFrameIndex.load();
6255 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
6257 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
6267 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6269 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
6271 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map/unmap helpers, split by allocation type.
6282 void BlockAllocMap();
6283 void BlockAllocUnmap();
6284 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
6287 #if VMA_STATS_STRING_ENABLED
6288 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
6289 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
6291 void InitBufferImageUsage(uint32_t bufferImageUsage)
6293 VMA_ASSERT(m_BufferImageUsage == 0);
6294 m_BufferImageUsage = bufferImageUsage;
6297 void PrintParameters(
class VmaJsonWriter& json)
const;
// Data members. Small integer fields are packed into uint8_t to keep the
// struct compact.
6301 VkDeviceSize m_Alignment;
6302 VkDeviceSize m_Size;
6304 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
6305 uint32_t m_MemoryTypeIndex;
6307 uint8_t m_SuballocationType;
// Allocation out of VmaDeviceMemoryBlock.
6314 struct BlockAllocation
6316 VmaDeviceMemoryBlock* m_Block;
6317 VkDeviceSize m_Offset;
6318 bool m_CanBecomeLost;
// Allocation for an object that has its own private VkDeviceMemory.
6322 struct DedicatedAllocation
6324 VkDeviceMemory m_hMemory;
6325 void* m_pMappedData; // Not null means memory is mapped.
// Only one of these is active, selected by m_Type.
6331 BlockAllocation m_BlockAllocation;
6333 DedicatedAllocation m_DedicatedAllocation;
6336 #if VMA_STATS_STRING_ENABLED
6337 uint32_t m_CreationFrameIndex;
6338 uint32_t m_BufferImageUsage;
// Represents a region of a VmaDeviceMemoryBlock that is either allocated or free.
// NOTE(review): extraction-garbled — the size and allocation-handle members of
// this struct appear to be missing from this chunk; verify against upstream.
6348 struct VmaSuballocation
6350 VkDeviceSize offset;
6353 VmaSuballocationType type;
6357 struct VmaSuballocationOffsetLess
6359 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6361 return lhs.offset < rhs.offset;
6364 struct VmaSuballocationOffsetGreater
6366 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6368 return lhs.offset > rhs.offset;
6372 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost (in bytes) charged per allocation that would have to be made lost to
// satisfy a new request; used to pick the cheapest allocation strategy.
6375 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// NOTE(review): enumerator list of this enum is missing from this garbled chunk.
6377 enum class VmaAllocationRequestType
/*
Parameters of a planned allocation inside a block, returned by
VmaBlockMetadata::CreateAllocationRequest and consumed by Alloc.
NOTE(review): some member lines appear to be missing here; verify upstream.
*/
6399 struct VmaAllocationRequest
6401 VkDeviceSize offset;
6402 VkDeviceSize sumFreeSize;
6403 VkDeviceSize sumItemSize;
6404 VmaSuballocationList::iterator item;
6405 size_t itemsToMakeLostCount;
6407 VmaAllocationRequestType type;
// Total cost of this request: bytes of existing allocations it would
// overwrite plus a fixed penalty per allocation made lost.
6409 VkDeviceSize CalcCost()
const
6411 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
/*
Abstract base for data structures that manage suballocations inside one
VkDeviceMemory block (generic free-list, linear, buddy — see subclasses).
NOTE(review): extraction-garbled — braces, access specifiers and some
declaration lines are missing from this chunk; verify against upstream.
*/
6419 class VmaBlockMetadata
6423 virtual ~VmaBlockMetadata() { }
6424 virtual void Init(VkDeviceSize size) { m_Size = size; }
// Validates all data structures inside this object. If not valid, returns false.
6427 virtual bool Validate()
const = 0;
6428 VkDeviceSize GetSize()
const {
return m_Size; }
6429 virtual size_t GetAllocationCount()
const = 0;
6430 virtual VkDeviceSize GetSumFreeSize()
const = 0;
6431 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
// Returns true if this block is empty - contains only single free suballocation.
6433 virtual bool IsEmpty()
const = 0;
6435 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
6437 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
6439 #if VMA_STATS_STRING_ENABLED
6440 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Tries to find a place for a suballocation with the given parameters inside
// this block. Returns true on success; the found place is filled into
// pAllocationRequest (allocation is not yet made, just planned).
6446 virtual bool CreateAllocationRequest(
6447 uint32_t currentFrameIndex,
6448 uint32_t frameInUseCount,
6449 VkDeviceSize bufferImageGranularity,
6450 VkDeviceSize allocSize,
6451 VkDeviceSize allocAlignment,
6453 VmaSuballocationType allocType,
6454 bool canMakeOtherLost,
6457 VmaAllocationRequest* pAllocationRequest) = 0;
6459 virtual bool MakeRequestedAllocationsLost(
6460 uint32_t currentFrameIndex,
6461 uint32_t frameInUseCount,
6462 VmaAllocationRequest* pAllocationRequest) = 0;
6464 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
6466 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
// Makes the actual allocation based on a request returned by
// CreateAllocationRequest (signature partially missing in this chunk).
6470 const VmaAllocationRequest& request,
6471 VmaSuballocationType type,
6472 VkDeviceSize allocSize,
6477 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
6480 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
// Helpers shared by subclasses for emitting the JSON detailed map.
6482 #if VMA_STATS_STRING_ENABLED
6483 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6484 VkDeviceSize unusedBytes,
6485 size_t allocationCount,
6486 size_t unusedRangeCount)
const;
6487 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6488 VkDeviceSize offset,
6490 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6491 VkDeviceSize offset,
6492 VkDeviceSize size)
const;
6493 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
6497 VkDeviceSize m_Size;
6498 const VkAllocationCallbacks* m_pAllocationCallbacks;
// Asserts and reports a failed validation condition inside Validate()
// implementations. NOTE(review): the closing part of this macro (returning
// false and terminating the do/while) is missing from this garbled chunk.
6501 #define VMA_VALIDATE(cond) do { if(!(cond)) { \
6502 VMA_ASSERT(0 && "Validation failed: " #cond); \
/*
General-purpose block metadata: keeps a list of all suballocations (used and
free) ordered by offset, plus a by-size index of free suballocations for
best-fit searches.
NOTE(review): extraction-garbled — braces, access specifiers and some
declaration lines are missing from this chunk; verify against upstream.
*/
6506 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
6508 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
6511 virtual ~VmaBlockMetadata_Generic();
6512 virtual void Init(VkDeviceSize size);
6514 virtual bool Validate()
const;
6515 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
6516 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
6517 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6518 virtual bool IsEmpty()
const;
6520 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6521 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6523 #if VMA_STATS_STRING_ENABLED
6524 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
6527 virtual bool CreateAllocationRequest(
6528 uint32_t currentFrameIndex,
6529 uint32_t frameInUseCount,
6530 VkDeviceSize bufferImageGranularity,
6531 VkDeviceSize allocSize,
6532 VkDeviceSize allocAlignment,
6534 VmaSuballocationType allocType,
6535 bool canMakeOtherLost,
6537 VmaAllocationRequest* pAllocationRequest);
6539 virtual bool MakeRequestedAllocationsLost(
6540 uint32_t currentFrameIndex,
6541 uint32_t frameInUseCount,
6542 VmaAllocationRequest* pAllocationRequest);
6544 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6546 virtual VkResult CheckCorruption(
const void* pBlockData);
// Alloc override (signature partially missing in this chunk).
6549 const VmaAllocationRequest& request,
6550 VmaSuballocationType type,
6551 VkDeviceSize allocSize,
6555 virtual void FreeAtOffset(VkDeviceSize offset);
// Heuristic used by defragmentation: checks whether a conflict on
// bufferImageGranularity is possible for the given suballocation types.
6560 bool IsBufferImageGranularityConflictPossible(
6561 VkDeviceSize bufferImageGranularity,
6562 VmaSuballocationType& inOutPrevSuballocType)
const;
6565 friend class VmaDefragmentationAlgorithm_Generic;
6566 friend class VmaDefragmentationAlgorithm_Fast;
6568 uint32_t m_FreeCount;
6569 VkDeviceSize m_SumFreeSize;
// All suballocations (free and used), sorted by offset.
6570 VmaSuballocationList m_Suballocations;
// Iterators pointing at free suballocations, sorted by size ascending —
// used for best-fit allocation searches.
6573 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
6575 bool ValidateFreeSuballocationList()
const;
// Checks if a requested allocation fits starting at suballocItem; outputs
// the offset and the cost in allocations that would have to be made lost.
6579 bool CheckAllocation(
6580 uint32_t currentFrameIndex,
6581 uint32_t frameInUseCount,
6582 VkDeviceSize bufferImageGranularity,
6583 VkDeviceSize allocSize,
6584 VkDeviceSize allocAlignment,
6585 VmaSuballocationType allocType,
6586 VmaSuballocationList::const_iterator suballocItem,
6587 bool canMakeOtherLost,
6588 VkDeviceSize* pOffset,
6589 size_t* itemsToMakeLostCount,
6590 VkDeviceSize* pSumFreeSize,
6591 VkDeviceSize* pSumItemSize)
const;
// Merges `item` with the next free suballocation in the list.
6593 void MergeFreeWithNext(VmaSuballocationList::iterator item);
// Turns a used suballocation into free, merging with neighbors as needed.
6597 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
// Maintain m_FreeSuballocationsBySize when free items appear/disappear.
6600 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
6603 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
/*
Linear (ring-buffer / double-stack) block metadata: suballocations are kept
in two vectors whose roles (1st/2nd) swap as allocations are freed, enabling
cheap linear allocation strategies.
NOTE(review): extraction-garbled — braces, access specifiers and some
declaration lines are missing from this chunk; verify against upstream.
*/
6684 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
6686 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
6689 virtual ~VmaBlockMetadata_Linear();
6690 virtual void Init(VkDeviceSize size);
6692 virtual bool Validate()
const;
6693 virtual size_t GetAllocationCount()
const;
6694 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
6695 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6696 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
6698 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6699 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6701 #if VMA_STATS_STRING_ENABLED
6702 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
6705 virtual bool CreateAllocationRequest(
6706 uint32_t currentFrameIndex,
6707 uint32_t frameInUseCount,
6708 VkDeviceSize bufferImageGranularity,
6709 VkDeviceSize allocSize,
6710 VkDeviceSize allocAlignment,
6712 VmaSuballocationType allocType,
6713 bool canMakeOtherLost,
6715 VmaAllocationRequest* pAllocationRequest);
6717 virtual bool MakeRequestedAllocationsLost(
6718 uint32_t currentFrameIndex,
6719 uint32_t frameInUseCount,
6720 VmaAllocationRequest* pAllocationRequest);
6722 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6724 virtual VkResult CheckCorruption(
const void* pBlockData);
// Alloc override (signature partially missing in this chunk).
6727 const VmaAllocationRequest& request,
6728 VmaSuballocationType type,
6729 VkDeviceSize allocSize,
6733 virtual void FreeAtOffset(VkDeviceSize offset);
6743 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
// Mode of the 2nd suballocation vector relative to the 1st.
6745 enum SECOND_VECTOR_MODE
6747 SECOND_VECTOR_EMPTY,
6752 SECOND_VECTOR_RING_BUFFER,
6758 SECOND_VECTOR_DOUBLE_STACK,
6761 VkDeviceSize m_SumFreeSize;
// Double-buffered storage; which one is "1st" is selected by m_1stVectorIndex.
6762 SuballocationVectorType m_Suballocations0, m_Suballocations1;
6763 uint32_t m_1stVectorIndex;
6764 SECOND_VECTOR_MODE m_2ndVectorMode;
6766 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
6767 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
6768 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
6769 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Counts of "null items" (freed slots not yet compacted away).
6772 size_t m_1stNullItemsBeginCount;
6774 size_t m_1stNullItemsMiddleCount;
6776 size_t m_2ndNullItemsCount;
6778 bool ShouldCompact1st()
const;
6779 void CleanupAfterFree();
// Allocation strategies for the two ends of the block.
6781 bool CreateAllocationRequest_LowerAddress(
6782 uint32_t currentFrameIndex,
6783 uint32_t frameInUseCount,
6784 VkDeviceSize bufferImageGranularity,
6785 VkDeviceSize allocSize,
6786 VkDeviceSize allocAlignment,
6787 VmaSuballocationType allocType,
6788 bool canMakeOtherLost,
6790 VmaAllocationRequest* pAllocationRequest);
6791 bool CreateAllocationRequest_UpperAddress(
6792 uint32_t currentFrameIndex,
6793 uint32_t frameInUseCount,
6794 VkDeviceSize bufferImageGranularity,
6795 VkDeviceSize allocSize,
6796 VkDeviceSize allocAlignment,
6797 VmaSuballocationType allocType,
6798 bool canMakeOtherLost,
6800 VmaAllocationRequest* pAllocationRequest);
/*
Buddy-system block metadata: the block is represented as a binary tree of
nodes whose sizes are powers of two; allocations take the smallest node level
that fits. Does not support the "lost allocation" mechanism.
NOTE(review): extraction-garbled — braces, access specifiers and several
declaration lines (including the Node struct body) are missing; verify
against upstream.
*/
6814 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
6816 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
6819 virtual ~VmaBlockMetadata_Buddy();
6820 virtual void Init(VkDeviceSize size);
6822 virtual bool Validate()
const;
6823 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
6824 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
6825 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6826 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
6828 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6829 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6831 #if VMA_STATS_STRING_ENABLED
6832 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
6835 virtual bool CreateAllocationRequest(
6836 uint32_t currentFrameIndex,
6837 uint32_t frameInUseCount,
6838 VkDeviceSize bufferImageGranularity,
6839 VkDeviceSize allocSize,
6840 VkDeviceSize allocAlignment,
6842 VmaSuballocationType allocType,
6843 bool canMakeOtherLost,
6845 VmaAllocationRequest* pAllocationRequest);
6847 virtual bool MakeRequestedAllocationsLost(
6848 uint32_t currentFrameIndex,
6849 uint32_t frameInUseCount,
6850 VmaAllocationRequest* pAllocationRequest);
6852 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Corruption detection is not supported by the buddy algorithm.
6854 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
// Alloc override (signature partially missing in this chunk).
6857 const VmaAllocationRequest& request,
6858 VmaSuballocationType type,
6859 VkDeviceSize allocSize,
6862 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
6863 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
// Tree shape limits: nodes are never split below MIN_NODE_SIZE bytes and
// the tree is at most MAX_LEVELS deep.
6866 static const VkDeviceSize MIN_NODE_SIZE = 32;
6867 static const size_t MAX_LEVELS = 30;
// Accumulators cross-checked against real counters during Validate().
6869 struct ValidationContext
6871 size_t calculatedAllocationCount;
6872 size_t calculatedFreeCount;
6873 VkDeviceSize calculatedSumFreeSize;
6875 ValidationContext() :
6876 calculatedAllocationCount(0),
6877 calculatedFreeCount(0),
6878 calculatedSumFreeSize(0) { }
6883 VkDeviceSize offset;
// Usable size is the largest power of two <= block size; the remainder is
// reported as unusable (see GetUnusableSize).
6913 VkDeviceSize m_UsableSize;
6914 uint32_t m_LevelCount;
// Per-level doubly-linked lists of free nodes.
6920 } m_FreeList[MAX_LEVELS];
6922 size_t m_AllocationCount;
6926 VkDeviceSize m_SumFreeSize;
6928 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
6929 void DeleteNode(Node* node);
6930 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
6931 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
// Each level halves the node size relative to the usable size.
6932 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
6934 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
6935 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
6939 void AddToFreeListFront(uint32_t level, Node* node);
6943 void RemoveFromFreeList(uint32_t level, Node* node);
6945 #if VMA_STATS_STRING_ENABLED
6946 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
6956 class VmaDeviceMemoryBlock
6958 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
6960 VmaBlockMetadata* m_pMetadata;
6964 ~VmaDeviceMemoryBlock()
6966 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
6967 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6974 uint32_t newMemoryTypeIndex,
6975 VkDeviceMemory newMemory,
6976 VkDeviceSize newSize,
6978 uint32_t algorithm);
6982 VmaPool GetParentPool()
const {
return m_hParentPool; }
6983 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
6984 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
6985 uint32_t GetId()
const {
return m_Id; }
6986 void* GetMappedData()
const {
return m_pMappedData; }
6989 bool Validate()
const;
6994 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
6997 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
6998 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
7000 VkResult BindBufferMemory(
7003 VkDeviceSize allocationLocalOffset,
7006 VkResult BindImageMemory(
7009 VkDeviceSize allocationLocalOffset,
7015 uint32_t m_MemoryTypeIndex;
7017 VkDeviceMemory m_hMemory;
7025 uint32_t m_MapCount;
7026 void* m_pMappedData;
7029 struct VmaPointerLess
7031 bool operator()(
const void* lhs,
const void* rhs)
const
7037 struct VmaDefragmentationMove
7039 size_t srcBlockIndex;
7040 size_t dstBlockIndex;
7041 VkDeviceSize srcOffset;
7042 VkDeviceSize dstOffset;
7045 VmaDeviceMemoryBlock* pSrcBlock;
7046 VmaDeviceMemoryBlock* pDstBlock;
7049 class VmaDefragmentationAlgorithm;
7057 struct VmaBlockVector
7059 VMA_CLASS_NO_COPY(VmaBlockVector)
7064 uint32_t memoryTypeIndex,
7065 VkDeviceSize preferredBlockSize,
7066 size_t minBlockCount,
7067 size_t maxBlockCount,
7068 VkDeviceSize bufferImageGranularity,
7069 uint32_t frameInUseCount,
7070 bool explicitBlockSize,
7075 VkResult CreateMinBlocks();
7077 VmaAllocator GetAllocator()
const {
return m_hAllocator; }
7078 VmaPool GetParentPool()
const {
return m_hParentPool; }
7079 bool IsCustomPool()
const {
return m_hParentPool != VMA_NULL; }
7080 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7081 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
7082 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
7083 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
7084 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
7089 bool IsCorruptionDetectionEnabled()
const;
7092 uint32_t currentFrameIndex,
7094 VkDeviceSize alignment,
7096 VmaSuballocationType suballocType,
7097 size_t allocationCount,
7105 #if VMA_STATS_STRING_ENABLED
7106 void PrintDetailedMap(
class VmaJsonWriter& json);
7109 void MakePoolAllocationsLost(
7110 uint32_t currentFrameIndex,
7111 size_t* pLostAllocationCount);
7112 VkResult CheckCorruption();
7116 class VmaBlockVectorDefragmentationContext* pCtx,
7118 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
7119 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
7120 VkCommandBuffer commandBuffer);
7121 void DefragmentationEnd(
7122 class VmaBlockVectorDefragmentationContext* pCtx,
7126 uint32_t ProcessDefragmentations(
7127 class VmaBlockVectorDefragmentationContext *pCtx,
7130 void CommitDefragmentations(
7131 class VmaBlockVectorDefragmentationContext *pCtx,
7137 size_t GetBlockCount()
const {
return m_Blocks.size(); }
7138 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
7139 size_t CalcAllocationCount()
const;
7140 bool IsBufferImageGranularityConflictPossible()
const;
7143 friend class VmaDefragmentationAlgorithm_Generic;
7147 const uint32_t m_MemoryTypeIndex;
7148 const VkDeviceSize m_PreferredBlockSize;
7149 const size_t m_MinBlockCount;
7150 const size_t m_MaxBlockCount;
7151 const VkDeviceSize m_BufferImageGranularity;
7152 const uint32_t m_FrameInUseCount;
7153 const bool m_ExplicitBlockSize;
7154 const uint32_t m_Algorithm;
7155 const float m_Priority;
7156 VMA_RW_MUTEX m_Mutex;
7160 bool m_HasEmptyBlock;
7162 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
7163 uint32_t m_NextBlockId;
7165 VkDeviceSize CalcMaxBlockSize()
const;
7168 void Remove(VmaDeviceMemoryBlock* pBlock);
7172 void IncrementallySortBlocks();
7174 VkResult AllocatePage(
7175 uint32_t currentFrameIndex,
7177 VkDeviceSize alignment,
7179 VmaSuballocationType suballocType,
7183 VkResult AllocateFromBlock(
7184 VmaDeviceMemoryBlock* pBlock,
7185 uint32_t currentFrameIndex,
7187 VkDeviceSize alignment,
7190 VmaSuballocationType suballocType,
7194 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
7197 void ApplyDefragmentationMovesCpu(
7198 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7199 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
7201 void ApplyDefragmentationMovesGpu(
7202 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7203 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7204 VkCommandBuffer commandBuffer);
7212 void UpdateHasEmptyBlock();
7217 VMA_CLASS_NO_COPY(VmaPool_T)
7219 VmaBlockVector m_BlockVector;
7224 VkDeviceSize preferredBlockSize);
7227 uint32_t GetId()
const {
return m_Id; }
7228 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
7230 const char* GetName()
const {
return m_Name; }
7231 void SetName(
const char* pName);
7233 #if VMA_STATS_STRING_ENABLED
7249 class VmaDefragmentationAlgorithm
7251 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
7253 VmaDefragmentationAlgorithm(
7255 VmaBlockVector* pBlockVector,
7256 uint32_t currentFrameIndex) :
7257 m_hAllocator(hAllocator),
7258 m_pBlockVector(pBlockVector),
7259 m_CurrentFrameIndex(currentFrameIndex)
7262 virtual ~VmaDefragmentationAlgorithm()
7266 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
7267 virtual void AddAll() = 0;
7269 virtual VkResult Defragment(
7270 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7271 VkDeviceSize maxBytesToMove,
7272 uint32_t maxAllocationsToMove,
7275 virtual VkDeviceSize GetBytesMoved()
const = 0;
7276 virtual uint32_t GetAllocationsMoved()
const = 0;
7280 VmaBlockVector*
const m_pBlockVector;
7281 const uint32_t m_CurrentFrameIndex;
7283 struct AllocationInfo
7286 VkBool32* m_pChanged;
7289 m_hAllocation(VK_NULL_HANDLE),
7290 m_pChanged(VMA_NULL)
7294 m_hAllocation(hAlloc),
7295 m_pChanged(pChanged)
7301 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
7303 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
7305 VmaDefragmentationAlgorithm_Generic(
7307 VmaBlockVector* pBlockVector,
7308 uint32_t currentFrameIndex,
7309 bool overlappingMoveSupported);
7310 virtual ~VmaDefragmentationAlgorithm_Generic();
7312 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7313 virtual void AddAll() { m_AllAllocations =
true; }
7315 virtual VkResult Defragment(
7316 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7317 VkDeviceSize maxBytesToMove,
7318 uint32_t maxAllocationsToMove,
7321 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7322 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7325 uint32_t m_AllocationCount;
7326 bool m_AllAllocations;
7328 VkDeviceSize m_BytesMoved;
7329 uint32_t m_AllocationsMoved;
7331 struct AllocationInfoSizeGreater
7333 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7335 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
7339 struct AllocationInfoOffsetGreater
7341 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7343 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
7349 size_t m_OriginalBlockIndex;
7350 VmaDeviceMemoryBlock* m_pBlock;
7351 bool m_HasNonMovableAllocations;
7352 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
7354 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
7355 m_OriginalBlockIndex(SIZE_MAX),
7357 m_HasNonMovableAllocations(true),
7358 m_Allocations(pAllocationCallbacks)
7362 void CalcHasNonMovableAllocations()
7364 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
7365 const size_t defragmentAllocCount = m_Allocations.size();
7366 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
7369 void SortAllocationsBySizeDescending()
7371 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
7374 void SortAllocationsByOffsetDescending()
7376 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
7380 struct BlockPointerLess
7382 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const
7384 return pLhsBlockInfo->m_pBlock < pRhsBlock;
7386 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7388 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
7394 struct BlockInfoCompareMoveDestination
7396 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7398 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
7402 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
7406 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
7414 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
7415 BlockInfoVector m_Blocks;
7417 VkResult DefragmentRound(
7418 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7419 VkDeviceSize maxBytesToMove,
7420 uint32_t maxAllocationsToMove,
7421 bool freeOldAllocations);
7423 size_t CalcBlocksWithNonMovableCount()
const;
7425 static bool MoveMakesSense(
7426 size_t dstBlockIndex, VkDeviceSize dstOffset,
7427 size_t srcBlockIndex, VkDeviceSize srcOffset);
7430 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
7432 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
7434 VmaDefragmentationAlgorithm_Fast(
7436 VmaBlockVector* pBlockVector,
7437 uint32_t currentFrameIndex,
7438 bool overlappingMoveSupported);
7439 virtual ~VmaDefragmentationAlgorithm_Fast();
7441 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
7442 virtual void AddAll() { m_AllAllocations =
true; }
7444 virtual VkResult Defragment(
7445 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7446 VkDeviceSize maxBytesToMove,
7447 uint32_t maxAllocationsToMove,
7450 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7451 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7456 size_t origBlockIndex;
7459 class FreeSpaceDatabase
7465 s.blockInfoIndex = SIZE_MAX;
7466 for(
size_t i = 0; i < MAX_COUNT; ++i)
7468 m_FreeSpaces[i] = s;
7472 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
7474 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7480 size_t bestIndex = SIZE_MAX;
7481 for(
size_t i = 0; i < MAX_COUNT; ++i)
7484 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
7489 if(m_FreeSpaces[i].size < size &&
7490 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
7496 if(bestIndex != SIZE_MAX)
7498 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
7499 m_FreeSpaces[bestIndex].offset = offset;
7500 m_FreeSpaces[bestIndex].size = size;
7504 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
7505 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
7507 size_t bestIndex = SIZE_MAX;
7508 VkDeviceSize bestFreeSpaceAfter = 0;
7509 for(
size_t i = 0; i < MAX_COUNT; ++i)
7512 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
7514 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
7516 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
7518 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
7520 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
7523 bestFreeSpaceAfter = freeSpaceAfter;
7529 if(bestIndex != SIZE_MAX)
7531 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
7532 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
7534 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7537 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
7538 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
7539 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
7544 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
7554 static const size_t MAX_COUNT = 4;
7558 size_t blockInfoIndex;
7559 VkDeviceSize offset;
7561 } m_FreeSpaces[MAX_COUNT];
7564 const bool m_OverlappingMoveSupported;
7566 uint32_t m_AllocationCount;
7567 bool m_AllAllocations;
7569 VkDeviceSize m_BytesMoved;
7570 uint32_t m_AllocationsMoved;
7572 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
7574 void PreprocessMetadata();
7575 void PostprocessMetadata();
7576 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
7579 struct VmaBlockDefragmentationContext
7583 BLOCK_FLAG_USED = 0x00000001,
7589 class VmaBlockVectorDefragmentationContext
7591 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
7595 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
7596 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
7597 uint32_t defragmentationMovesProcessed;
7598 uint32_t defragmentationMovesCommitted;
7599 bool hasDefragmentationPlan;
7601 VmaBlockVectorDefragmentationContext(
7604 VmaBlockVector* pBlockVector,
7605 uint32_t currFrameIndex);
7606 ~VmaBlockVectorDefragmentationContext();
7608 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
7609 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
7610 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
7612 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7613 void AddAll() { m_AllAllocations =
true; }
7622 VmaBlockVector*
const m_pBlockVector;
7623 const uint32_t m_CurrFrameIndex;
7625 VmaDefragmentationAlgorithm* m_pAlgorithm;
7633 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
7634 bool m_AllAllocations;
7637 struct VmaDefragmentationContext_T
7640 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
7642 VmaDefragmentationContext_T(
7644 uint32_t currFrameIndex,
7647 ~VmaDefragmentationContext_T();
7649 void AddPools(uint32_t poolCount,
const VmaPool* pPools);
7650 void AddAllocations(
7651 uint32_t allocationCount,
7653 VkBool32* pAllocationsChanged);
7661 VkResult Defragment(
7662 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
7663 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
7667 VkResult DefragmentPassEnd();
7671 const uint32_t m_CurrFrameIndex;
7672 const uint32_t m_Flags;
7675 VkDeviceSize m_MaxCpuBytesToMove;
7676 uint32_t m_MaxCpuAllocationsToMove;
7677 VkDeviceSize m_MaxGpuBytesToMove;
7678 uint32_t m_MaxGpuAllocationsToMove;
7681 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
7683 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
7686 #if VMA_RECORDING_ENABLED
7693 void WriteConfiguration(
7694 const VkPhysicalDeviceProperties& devProps,
7695 const VkPhysicalDeviceMemoryProperties& memProps,
7696 uint32_t vulkanApiVersion,
7697 bool dedicatedAllocationExtensionEnabled,
7698 bool bindMemory2ExtensionEnabled,
7699 bool memoryBudgetExtensionEnabled,
7700 bool deviceCoherentMemoryExtensionEnabled);
7703 void RecordCreateAllocator(uint32_t frameIndex);
7704 void RecordDestroyAllocator(uint32_t frameIndex);
7705 void RecordCreatePool(uint32_t frameIndex,
7708 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
7709 void RecordAllocateMemory(uint32_t frameIndex,
7710 const VkMemoryRequirements& vkMemReq,
7713 void RecordAllocateMemoryPages(uint32_t frameIndex,
7714 const VkMemoryRequirements& vkMemReq,
7716 uint64_t allocationCount,
7718 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
7719 const VkMemoryRequirements& vkMemReq,
7720 bool requiresDedicatedAllocation,
7721 bool prefersDedicatedAllocation,
7724 void RecordAllocateMemoryForImage(uint32_t frameIndex,
7725 const VkMemoryRequirements& vkMemReq,
7726 bool requiresDedicatedAllocation,
7727 bool prefersDedicatedAllocation,
7730 void RecordFreeMemory(uint32_t frameIndex,
7732 void RecordFreeMemoryPages(uint32_t frameIndex,
7733 uint64_t allocationCount,
7735 void RecordSetAllocationUserData(uint32_t frameIndex,
7737 const void* pUserData);
7738 void RecordCreateLostAllocation(uint32_t frameIndex,
7740 void RecordMapMemory(uint32_t frameIndex,
7742 void RecordUnmapMemory(uint32_t frameIndex,
7744 void RecordFlushAllocation(uint32_t frameIndex,
7745 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
7746 void RecordInvalidateAllocation(uint32_t frameIndex,
7747 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
7748 void RecordCreateBuffer(uint32_t frameIndex,
7749 const VkBufferCreateInfo& bufCreateInfo,
7752 void RecordCreateImage(uint32_t frameIndex,
7753 const VkImageCreateInfo& imageCreateInfo,
7756 void RecordDestroyBuffer(uint32_t frameIndex,
7758 void RecordDestroyImage(uint32_t frameIndex,
7760 void RecordTouchAllocation(uint32_t frameIndex,
7762 void RecordGetAllocationInfo(uint32_t frameIndex,
7764 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
7766 void RecordDefragmentationBegin(uint32_t frameIndex,
7769 void RecordDefragmentationEnd(uint32_t frameIndex,
7771 void RecordSetPoolName(uint32_t frameIndex,
7782 class UserDataString
7786 const char* GetString()
const {
return m_Str; }
7796 VMA_MUTEX m_FileMutex;
7797 std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;
7799 void GetBasicParams(CallParams& outParams);
7802 template<
typename T>
7803 void PrintPointerList(uint64_t count,
const T* pItems)
7807 fprintf(m_File,
"%p", pItems[0]);
7808 for(uint64_t i = 1; i < count; ++i)
7810 fprintf(m_File,
" %p", pItems[i]);
7815 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
7819 #endif // #if VMA_RECORDING_ENABLED
7824 class VmaAllocationObjectAllocator
7826 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
7828 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
7830 template<
typename... Types>
VmaAllocation Allocate(Types... args);
7835 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
7838 struct VmaCurrentBudgetData
7840 VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
7841 VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
7843 #if VMA_MEMORY_BUDGET
7844 VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
7845 VMA_RW_MUTEX m_BudgetMutex;
7846 uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
7847 uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
7848 uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
7849 #endif // #if VMA_MEMORY_BUDGET
7851 VmaCurrentBudgetData()
7853 for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
7855 m_BlockBytes[heapIndex] = 0;
7856 m_AllocationBytes[heapIndex] = 0;
7857 #if VMA_MEMORY_BUDGET
7858 m_VulkanUsage[heapIndex] = 0;
7859 m_VulkanBudget[heapIndex] = 0;
7860 m_BlockBytesAtBudgetFetch[heapIndex] = 0;
7864 #if VMA_MEMORY_BUDGET
7865 m_OperationsSinceBudgetFetch = 0;
7869 void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
7871 m_AllocationBytes[heapIndex] += allocationSize;
7872 #if VMA_MEMORY_BUDGET
7873 ++m_OperationsSinceBudgetFetch;
7877 void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
7879 VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
7880 m_AllocationBytes[heapIndex] -= allocationSize;
7881 #if VMA_MEMORY_BUDGET
7882 ++m_OperationsSinceBudgetFetch;
7888 struct VmaAllocator_T
7890 VMA_CLASS_NO_COPY(VmaAllocator_T)
7893 uint32_t m_VulkanApiVersion;
7894 bool m_UseKhrDedicatedAllocation;
7895 bool m_UseKhrBindMemory2;
7896 bool m_UseExtMemoryBudget;
7897 bool m_UseAmdDeviceCoherentMemory;
7898 bool m_UseKhrBufferDeviceAddress;
7899 bool m_UseExtMemoryPriority;
7901 VkInstance m_hInstance;
7902 bool m_AllocationCallbacksSpecified;
7903 VkAllocationCallbacks m_AllocationCallbacks;
7905 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
7908 uint32_t m_HeapSizeLimitMask;
7910 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
7911 VkPhysicalDeviceMemoryProperties m_MemProps;
7914 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
7917 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
7918 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
7919 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
7921 VmaCurrentBudgetData m_Budget;
7927 const VkAllocationCallbacks* GetAllocationCallbacks()
const
7929 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
7933 return m_VulkanFunctions;
7936 VkPhysicalDevice GetPhysicalDevice()
const {
return m_PhysicalDevice; }
7938 VkDeviceSize GetBufferImageGranularity()
const
7941 static_cast<VkDeviceSize
>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
7942 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
7945 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
7946 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
7948 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const
7950 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
7951 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
7954 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const
7956 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
7957 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7960 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const
7962 return IsMemoryTypeNonCoherent(memTypeIndex) ?
7963 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
7964 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
7967 bool IsIntegratedGpu()
const
7969 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
7972 uint32_t GetGlobalMemoryTypeBits()
const {
return m_GlobalMemoryTypeBits; }
7974 #if VMA_RECORDING_ENABLED
7975 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
7978 void GetBufferMemoryRequirements(
7980 VkMemoryRequirements& memReq,
7981 bool& requiresDedicatedAllocation,
7982 bool& prefersDedicatedAllocation)
const;
7983 void GetImageMemoryRequirements(
7985 VkMemoryRequirements& memReq,
7986 bool& requiresDedicatedAllocation,
7987 bool& prefersDedicatedAllocation)
const;
7990 VkResult AllocateMemory(
7991 const VkMemoryRequirements& vkMemReq,
7992 bool requiresDedicatedAllocation,
7993 bool prefersDedicatedAllocation,
7994 VkBuffer dedicatedBuffer,
7995 VkBufferUsageFlags dedicatedBufferUsage,
7996 VkImage dedicatedImage,
7998 VmaSuballocationType suballocType,
7999 size_t allocationCount,
8004 size_t allocationCount,
8007 VkResult ResizeAllocation(
8009 VkDeviceSize newSize);
8011 void CalculateStats(
VmaStats* pStats);
8014 VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);
8016 #if VMA_STATS_STRING_ENABLED
8017 void PrintDetailedMap(
class VmaJsonWriter& json);
8020 VkResult DefragmentationBegin(
8024 VkResult DefragmentationEnd(
8027 VkResult DefragmentationPassBegin(
8030 VkResult DefragmentationPassEnd(
8037 void DestroyPool(
VmaPool pool);
8040 void SetCurrentFrameIndex(uint32_t frameIndex);
8041 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
8043 void MakePoolAllocationsLost(
8045 size_t* pLostAllocationCount);
8046 VkResult CheckPoolCorruption(
VmaPool hPool);
8047 VkResult CheckCorruption(uint32_t memoryTypeBits);
8052 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
8054 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
8056 VkResult BindVulkanBuffer(
8057 VkDeviceMemory memory,
8058 VkDeviceSize memoryOffset,
8062 VkResult BindVulkanImage(
8063 VkDeviceMemory memory,
8064 VkDeviceSize memoryOffset,
8071 VkResult BindBufferMemory(
8073 VkDeviceSize allocationLocalOffset,
8076 VkResult BindImageMemory(
8078 VkDeviceSize allocationLocalOffset,
8082 VkResult FlushOrInvalidateAllocation(
8084 VkDeviceSize offset, VkDeviceSize size,
8085 VMA_CACHE_OPERATION op);
8086 VkResult FlushOrInvalidateAllocations(
8087 uint32_t allocationCount,
8089 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
8090 VMA_CACHE_OPERATION op);
8092 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
8098 uint32_t GetGpuDefragmentationMemoryTypeBits();
8101 VkDeviceSize m_PreferredLargeHeapBlockSize;
8103 VkPhysicalDevice m_PhysicalDevice;
8104 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
8105 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
8107 VMA_RW_MUTEX m_PoolsMutex;
8109 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
8110 uint32_t m_NextPoolId;
8115 uint32_t m_GlobalMemoryTypeBits;
8117 #if VMA_RECORDING_ENABLED
8118 VmaRecorder* m_pRecorder;
8123 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
8124 void ImportVulkanFunctions_Static();
8129 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
8130 void ImportVulkanFunctions_Dynamic();
8133 void ValidateVulkanFunctions();
8135 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
8137 VkResult AllocateMemoryOfType(
8139 VkDeviceSize alignment,
8140 bool dedicatedAllocation,
8141 VkBuffer dedicatedBuffer,
8142 VkBufferUsageFlags dedicatedBufferUsage,
8143 VkImage dedicatedImage,
8145 uint32_t memTypeIndex,
8146 VmaSuballocationType suballocType,
8147 size_t allocationCount,
8151 VkResult AllocateDedicatedMemoryPage(
8153 VmaSuballocationType suballocType,
8154 uint32_t memTypeIndex,
8155 const VkMemoryAllocateInfo& allocInfo,
8157 bool isUserDataString,
8162 VkResult AllocateDedicatedMemory(
8164 VmaSuballocationType suballocType,
8165 uint32_t memTypeIndex,
8168 bool isUserDataString,
8171 VkBuffer dedicatedBuffer,
8172 VkBufferUsageFlags dedicatedBufferUsage,
8173 VkImage dedicatedImage,
8174 size_t allocationCount,
8183 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
8185 uint32_t CalculateGlobalMemoryTypeBits()
const;
8187 bool GetFlushOrInvalidateRange(
8189 VkDeviceSize offset, VkDeviceSize size,
8190 VkMappedMemoryRange& outRange)
const;
8192 #if VMA_MEMORY_BUDGET
8193 void UpdateVulkanBudget();
8194 #endif // #if VMA_MEMORY_BUDGET
8200 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
8202 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
8205 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
8207 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
8210 template<
typename T>
8213 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
8216 template<
typename T>
8217 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
8219 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
8222 template<
typename T>
8223 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
8228 VmaFree(hAllocator, ptr);
8232 template<
typename T>
8233 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
8237 for(
size_t i = count; i--; )
8239 VmaFree(hAllocator, ptr);
8246 #if VMA_STATS_STRING_ENABLED
8248 class VmaStringBuilder
8251 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
8252 size_t GetLength()
const {
return m_Data.size(); }
8253 const char* GetData()
const {
return m_Data.data(); }
8255 void Add(
char ch) { m_Data.push_back(ch); }
8256 void Add(
const char* pStr);
8257 void AddNewLine() { Add(
'\n'); }
8258 void AddNumber(uint32_t num);
8259 void AddNumber(uint64_t num);
8260 void AddPointer(
const void* ptr);
8263 VmaVector< char, VmaStlAllocator<char> > m_Data;
8266 void VmaStringBuilder::Add(
const char* pStr)
8268 const size_t strLen = strlen(pStr);
8271 const size_t oldCount = m_Data.size();
8272 m_Data.resize(oldCount + strLen);
8273 memcpy(m_Data.data() + oldCount, pStr, strLen);
8277 void VmaStringBuilder::AddNumber(uint32_t num)
8284 *--p =
'0' + (num % 10);
8291 void VmaStringBuilder::AddNumber(uint64_t num)
8298 *--p =
'0' + (num % 10);
8305 void VmaStringBuilder::AddPointer(
const void* ptr)
8308 VmaPtrToStr(buf,
sizeof(buf), ptr);
8312 #endif // #if VMA_STATS_STRING_ENABLED
8317 #if VMA_STATS_STRING_ENABLED
8321 VMA_CLASS_NO_COPY(VmaJsonWriter)
8323 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
8326 void BeginObject(
bool singleLine =
false);
8329 void BeginArray(
bool singleLine =
false);
8332 void WriteString(
const char* pStr);
8333 void BeginString(
const char* pStr = VMA_NULL);
8334 void ContinueString(
const char* pStr);
8335 void ContinueString(uint32_t n);
8336 void ContinueString(uint64_t n);
8337 void ContinueString_Pointer(
const void* ptr);
8338 void EndString(
const char* pStr = VMA_NULL);
8340 void WriteNumber(uint32_t n);
8341 void WriteNumber(uint64_t n);
8342 void WriteBool(
bool b);
8346 static const char*
const INDENT;
8348 enum COLLECTION_TYPE
8350 COLLECTION_TYPE_OBJECT,
8351 COLLECTION_TYPE_ARRAY,
8355 COLLECTION_TYPE type;
8356 uint32_t valueCount;
8357 bool singleLineMode;
8360 VmaStringBuilder& m_SB;
8361 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
8362 bool m_InsideString;
8364 void BeginValue(
bool isString);
8365 void WriteIndent(
bool oneLess =
false);
8368 const char*
const VmaJsonWriter::INDENT =
" ";
8370 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
8372 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
8373 m_InsideString(false)
8377 VmaJsonWriter::~VmaJsonWriter()
8379 VMA_ASSERT(!m_InsideString);
8380 VMA_ASSERT(m_Stack.empty());
// Opens a JSON object ("{") and pushes a stack item tracking it.
// singleLine suppresses per-member indentation for this object.
8383 void VmaJsonWriter::BeginObject(
bool singleLine)
8385 VMA_ASSERT(!m_InsideString);
8391 item.type = COLLECTION_TYPE_OBJECT;
8392 item.valueCount = 0;
8393 item.singleLineMode = singleLine;
8394 m_Stack.push_back(item);
// Closes the innermost open collection, which must be an object.
8397 void VmaJsonWriter::EndObject()
8399 VMA_ASSERT(!m_InsideString);
8404 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens a JSON array ("[") and pushes a stack item tracking it.
8408 void VmaJsonWriter::BeginArray(
bool singleLine)
8410 VMA_ASSERT(!m_InsideString);
8416 item.type = COLLECTION_TYPE_ARRAY;
8417 item.valueCount = 0;
8418 item.singleLineMode = singleLine;
8419 m_Stack.push_back(item);
// Closes the innermost open collection, which must be an array.
8422 void VmaJsonWriter::EndArray()
8424 VMA_ASSERT(!m_InsideString);
8429 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: writes a complete JSON string value in one call
// (body lost to extraction; presumably BeginString + EndString — TODO confirm).
8433 void VmaJsonWriter::WriteString(
const char* pStr)
// Starts a JSON string value; optional pStr writes an initial fragment.
8439 void VmaJsonWriter::BeginString(
const char* pStr)
8441 VMA_ASSERT(!m_InsideString);
8445 m_InsideString =
true;
8446 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8448 ContinueString(pStr);
// Appends raw text to the string currently being built, character by
// character (the per-character escaping switch was lost to extraction;
// only the "unsupported character" assert remains visible).
8452 void VmaJsonWriter::ContinueString(
const char* pStr)
8454 VMA_ASSERT(m_InsideString);
8456 const size_t strLen = strlen(pStr);
8457 for(
size_t i = 0; i < strLen; ++i)
8490 VMA_ASSERT(0 &&
"Character not currently supported.");
// Appends a 32-bit number as text inside an open string value.
8496 void VmaJsonWriter::ContinueString(uint32_t n)
8498 VMA_ASSERT(m_InsideString);
// Appends a 64-bit number as text inside an open string value.
8502 void VmaJsonWriter::ContinueString(uint64_t n)
8504 VMA_ASSERT(m_InsideString);
// Appends a formatted pointer value inside an open string value.
8508 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
8510 VMA_ASSERT(m_InsideString);
8511 m_SB.AddPointer(ptr);
// Finishes the current string value; optional pStr writes a last fragment.
8514 void VmaJsonWriter::EndString(
const char* pStr)
8516 VMA_ASSERT(m_InsideString);
8517 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8519 ContinueString(pStr);
8522 m_InsideString =
false;
// Writes a 32-bit numeric JSON value (not inside a string).
8525 void VmaJsonWriter::WriteNumber(uint32_t n)
8527 VMA_ASSERT(!m_InsideString);
// Writes a 64-bit numeric JSON value (not inside a string).
8532 void VmaJsonWriter::WriteNumber(uint64_t n)
8534 VMA_ASSERT(!m_InsideString);
// Writes a JSON boolean literal: "true" or "false".
8539 void VmaJsonWriter::WriteBool(
bool b)
8541 VMA_ASSERT(!m_InsideString);
8543 m_SB.Add(b ?
"true" :
"false");
// Writes the JSON null literal.
8546 void VmaJsonWriter::WriteNull()
8548 VMA_ASSERT(!m_InsideString);
// Called before every value: emits the separator required by the enclosing
// collection and bumps its value count. Inside an object, even-indexed
// values are keys and must be strings (hence the assert); odd-indexed
// values follow a ":" while later entries follow a ",".
8553 void VmaJsonWriter::BeginValue(
bool isString)
8555 if(!m_Stack.empty())
8557 StackItem& currItem = m_Stack.back();
8558 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8559 currItem.valueCount % 2 == 0)
8561 VMA_ASSERT(isString);
8564 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8565 currItem.valueCount % 2 != 0)
8569 else if(currItem.valueCount > 0)
8578 ++currItem.valueCount;
// Writes a newline plus one INDENT per open collection, unless the current
// collection is in single-line mode. oneLess reduces the depth by one
// (used when printing a closing brace/bracket).
8582 void VmaJsonWriter::WriteIndent(
bool oneLess)
8584 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
8588 size_t count = m_Stack.size();
8589 if(count > 0 && oneLess)
8593 for(
size_t i = 0; i < count; ++i)
8600 #endif // #if VMA_STATS_STRING_ENABLED
// Sets the allocation's user data. In "user data as string" mode the old
// string is freed and the new one deep-copied; otherwise the raw pointer
// is stored as-is (no ownership taken).
8604 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
8606 if(IsUserDataString())
8608 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
8610 FreeUserDataString(hAllocator);
8612 if(pUserData != VMA_NULL)
8614 m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (
const char*)pUserData);
8619 m_pUserData = pUserData;
// Moves this block-type allocation to a different device memory block
// (used during defragmentation). If persistently mapped, the mapping
// reference count is transferred: unmap old block, map new block.
8623 void VmaAllocation_T::ChangeBlockAllocation(
8625 VmaDeviceMemoryBlock* block,
8626 VkDeviceSize offset)
8628 VMA_ASSERT(block != VMA_NULL);
8629 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8632 if(block != m_BlockAllocation.m_Block)
8634 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
8635 if(IsPersistentMap())
8637 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
8638 block->Map(hAllocator, mapRefCount, VMA_NULL);
8641 m_BlockAllocation.m_Block = block;
8642 m_BlockAllocation.m_Offset = offset;
// Updates the offset of a block-type allocation within its current block.
8645 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
8647 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8648 m_BlockAllocation.m_Offset = newOffset;
// Returns the offset within the device memory: the suballocation offset
// for block allocations; dedicated allocations own whole VkDeviceMemory
// objects (their case continues in lines lost to extraction).
8651 VkDeviceSize VmaAllocation_T::GetOffset()
const
8655 case ALLOCATION_TYPE_BLOCK:
8656 return m_BlockAllocation.m_Offset;
8657 case ALLOCATION_TYPE_DEDICATED:
// Returns the VkDeviceMemory backing this allocation: the owning block's
// memory for block allocations, the dedicated handle otherwise.
8665 VkDeviceMemory VmaAllocation_T::GetMemory()
const
8669 case ALLOCATION_TYPE_BLOCK:
8670 return m_BlockAllocation.m_Block->GetDeviceMemory();
8671 case ALLOCATION_TYPE_DEDICATED:
8672 return m_DedicatedAllocation.m_hMemory;
8675 return VK_NULL_HANDLE;
// Returns the host pointer for this allocation if mapped, else null
// (null path lost to extraction). Block allocations offset into the
// block's mapped base pointer; dedicated allocations store theirs directly.
8679 void* VmaAllocation_T::GetMappedData()
const
8683 case ALLOCATION_TYPE_BLOCK:
8686 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
8687 VMA_ASSERT(pBlockData != VMA_NULL);
8688 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
8695 case ALLOCATION_TYPE_DEDICATED:
8696 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
8697 return m_DedicatedAllocation.m_pMappedData;
// Whether this allocation can be made "lost": only block allocations
// carry the m_CanBecomeLost flag; dedicated ones cannot (their case
// continues in lines lost to extraction).
8704 bool VmaAllocation_T::CanBecomeLost()
const
8708 case ALLOCATION_TYPE_BLOCK:
8709 return m_BlockAllocation.m_CanBecomeLost;
8710 case ALLOCATION_TYPE_DEDICATED:
// Tries to mark the allocation lost via a compare-exchange loop on its
// last-use frame index: fails if already lost or still in use within
// frameInUseCount frames of the current frame.
8718 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8720 VMA_ASSERT(CanBecomeLost());
8726 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
8729 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8734 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
8740 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names for VmaSuballocationType values, indexed by the
// enum, used in the JSON statistics output (entries lost to extraction).
8750 #if VMA_STATS_STRING_ENABLED
8753 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Emits this allocation's properties as JSON key/value pairs: type name,
// size, user data (string or pointer), creation/last-use frame indices,
// and buffer/image usage flags when nonzero.
8762 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const
8764 json.WriteString(
"Type");
8765 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
8767 json.WriteString(
"Size");
8768 json.WriteNumber(m_Size);
8770 if(m_pUserData != VMA_NULL)
8772 json.WriteString(
"UserData");
8773 if(IsUserDataString())
8775 json.WriteString((
const char*)m_pUserData);
8780 json.ContinueString_Pointer(m_pUserData);
8785 json.WriteString(
"CreationFrameIndex");
8786 json.WriteNumber(m_CreationFrameIndex);
8788 json.WriteString(
"LastUseFrameIndex");
8789 json.WriteNumber(GetLastUseFrameIndex());
8791 if(m_BufferImageUsage != 0)
8793 json.WriteString(
"Usage");
8794 json.WriteNumber(m_BufferImageUsage);
// Frees the deep-copied user-data string and clears the pointer.
// Only valid in "user data as string" mode.
8800 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
8802 VMA_ASSERT(IsUserDataString());
8803 VmaFreeString(hAllocator->GetAllocationCallbacks(), (
char*)m_pUserData);
8804 m_pUserData = VMA_NULL;
// Increments the map reference count for a block allocation.
// 0x7F is the max count representable outside the persistent-map flag bit.
8807 void VmaAllocation_T::BlockAllocMap()
8809 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
8811 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
8817 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the map reference count for a block allocation; asserts if
// it was not mapped.
8821 void VmaAllocation_T::BlockAllocUnmap()
8823 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
8825 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
8831 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation. If already mapped, bumps the reference
// count and returns the cached pointer; otherwise calls vkMapMemory and
// caches the result on success.
8835 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
8837 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
8841 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
8843 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
8844 *ppData = m_DedicatedAllocation.m_pMappedData;
8850 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
8851 return VK_ERROR_MEMORY_MAP_FAILED;
8856 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
8857 hAllocator->m_hDevice,
8858 m_DedicatedAllocation.m_hMemory,
8863 if(result == VK_SUCCESS)
8865 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation: decrements the reference count and calls
// vkUnmapMemory when it drops to zero; asserts on unbalanced unmap.
8872 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
8874 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
8876 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
8881 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
8882 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
8883 hAllocator->m_hDevice,
8884 m_DedicatedAllocation.m_hMemory);
8889 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo record to JSON: counters, byte totals, and
// min/avg/max sub-objects for allocation and unused-range sizes.
8893 #if VMA_STATS_STRING_ENABLED
8895 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
8899 json.WriteString(
"Blocks");
8902 json.WriteString(
"Allocations");
8905 json.WriteString(
"UnusedRanges");
8908 json.WriteString(
"UsedBytes");
8911 json.WriteString(
"UnusedBytes");
8916 json.WriteString(
"AllocationSize");
8917 json.BeginObject(
true);
8918 json.WriteString(
"Min");
8920 json.WriteString(
"Avg");
8922 json.WriteString(
"Max");
8929 json.WriteString(
"UnusedRangeSize");
8930 json.BeginObject(
true);
8931 json.WriteString(
"Min");
8933 json.WriteString(
"Avg");
8935 json.WriteString(
"Max");
8943 #endif // #if VMA_STATS_STRING_ENABLED
// Comparator ordering suballocation-list iterators by suballocation size;
// the second overload compares against a raw size for binary search.
8945 struct VmaSuballocationItemSizeLess
8948 const VmaSuballocationList::iterator lhs,
8949 const VmaSuballocationList::iterator rhs)
const
8951 return lhs->size < rhs->size;
8954 const VmaSuballocationList::iterator lhs,
8955 VkDeviceSize rhsSize)
const
8957 return lhs->size < rhsSize;
// Base metadata constructor: captures the allocator's allocation callbacks.
8965 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
8967 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
// Starts the JSON detailed-map block: summary fields followed by the
// opening of the "Suballocations" list.
8971 #if VMA_STATS_STRING_ENABLED
8973 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
8974 VkDeviceSize unusedBytes,
8975 size_t allocationCount,
8976 size_t unusedRangeCount)
const
8980 json.WriteString(
"TotalBytes");
8981 json.WriteNumber(GetSize());
8983 json.WriteString(
"UnusedBytes");
8984 json.WriteNumber(unusedBytes);
8986 json.WriteString(
"Allocations");
8987 json.WriteNumber((uint64_t)allocationCount);
8989 json.WriteString(
"UnusedRanges");
8990 json.WriteNumber((uint64_t)unusedRangeCount);
8992 json.WriteString(
"Suballocations");
// Emits one used suballocation as a single-line JSON object: its offset
// plus the allocation's own parameters.
8996 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
8997 VkDeviceSize offset,
9000 json.BeginObject(
true);
9002 json.WriteString(
"Offset");
9003 json.WriteNumber(offset);
9005 hAllocation->PrintParameters(json);
// Emits one free range as a single-line JSON object: offset, the FREE
// type name, and size.
9010 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
9011 VkDeviceSize offset,
9012 VkDeviceSize size)
const
9014 json.BeginObject(
true);
9016 json.WriteString(
"Offset");
9017 json.WriteNumber(offset);
9019 json.WriteString(
"Type");
9020 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
9022 json.WriteString(
"Size");
9023 json.WriteNumber(size);
// Closes the detailed-map JSON started by PrintDetailedMap_Begin
// (body lost to extraction).
9028 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const
9034 #endif // #if VMA_STATS_STRING_ENABLED
// Generic (free-list) metadata constructor: initializes the suballocation
// list and the by-size vector of free ranges with the allocator callbacks.
9039 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
9040 VmaBlockMetadata(hAllocator),
9043 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9044 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
// Trivial destructor; containers clean up via their STL-style allocators.
9048 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initializes metadata for a block of `size` bytes: the whole block starts
// as one FREE suballocation, registered in the by-size vector.
9052 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
9054 VmaBlockMetadata::Init(size);
9057 m_SumFreeSize = size;
9059 VmaSuballocation suballoc = {};
9060 suballoc.offset = 0;
9061 suballoc.size = size;
9062 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9063 suballoc.hAllocation = VK_NULL_HANDLE;
9065 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9066 m_Suballocations.push_back(suballoc);
9067 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
9069 m_FreeSuballocationsBySize.push_back(suballocItem);
// Consistency check over the whole metadata: walks the suballocation list
// verifying contiguous offsets, no two adjacent free ranges, matching
// allocation handles, and that cached totals (m_SumFreeSize, m_FreeCount,
// by-size vector contents and its sort order) agree with reality.
9072 bool VmaBlockMetadata_Generic::Validate()
const
9074 VMA_VALIDATE(!m_Suballocations.empty());
9077 VkDeviceSize calculatedOffset = 0;
9079 uint32_t calculatedFreeCount = 0;
9081 VkDeviceSize calculatedSumFreeSize = 0;
9084 size_t freeSuballocationsToRegister = 0;
9086 bool prevFree =
false;
9088 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9089 suballocItem != m_Suballocations.cend();
9092 const VmaSuballocation& subAlloc = *suballocItem;
// Each suballocation must start exactly where the previous one ended.
9095 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
9097 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two free ranges may never be adjacent — they must have been merged.
9099 VMA_VALIDATE(!prevFree || !currFree);
9101 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
9105 calculatedSumFreeSize += subAlloc.size;
9106 ++calculatedFreeCount;
9107 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9109 ++freeSuballocationsToRegister;
9113 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
9117 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
9118 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
9121 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
9124 calculatedOffset += subAlloc.size;
9125 prevFree = currFree;
9130 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
// The by-size vector must be sorted ascending by range size.
9132 VkDeviceSize lastSize = 0;
9133 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
9135 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
9138 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
9140 VMA_VALIDATE(suballocItem->size >= lastSize);
9142 lastSize = suballocItem->size;
9146 VMA_VALIDATE(ValidateFreeSuballocationList());
9147 VMA_VALIDATE(calculatedOffset == GetSize());
9148 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
9149 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range: the back of the size-sorted vector (0 when empty —
// that branch lost to extraction).
9154 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const
9156 if(!m_FreeSuballocationsBySize.empty())
9158 return m_FreeSuballocationsBySize.back()->size;
// Empty means exactly one suballocation and it is free (whole block free).
9166 bool VmaBlockMetadata_Generic::IsEmpty()
const
9168 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Accumulates per-block statistics into outInfo by iterating all
// suballocations (per-entry accumulation lines lost to extraction).
9171 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
9175 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9187 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9188 suballocItem != m_Suballocations.cend();
9191 const VmaSuballocation& suballoc = *suballocItem;
9192 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Adds this block's totals into aggregate pool statistics.
9205 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const
9207 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9209 inoutStats.
size += GetSize();
// JSON dump of the whole block: header (Begin), then one entry per
// suballocation — unused ranges and allocations — then the footer (End).
9216 #if VMA_STATS_STRING_ENABLED
9218 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const
9220 PrintDetailedMap_Begin(json,
9222 m_Suballocations.size() - (
size_t)m_FreeCount,
9226 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9227 suballocItem != m_Suballocations.cend();
9228 ++suballocItem, ++i)
9230 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9232 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
9236 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
9240 PrintDetailedMap_End(json);
9243 #endif // #if VMA_STATS_STRING_ENABLED
// Searches for a place to put a new allocation of allocSize/allocAlignment.
// Strategy branches (visible here): best-fit via binary search in the
// size-sorted free vector; min-offset via a linear scan of all
// suballocations; worst-fit by walking the size-sorted vector backwards.
// If nothing fits and canMakeOtherLost is set, considers evicting
// "lost-able" allocations, keeping the candidate with the lowest cost.
// Returns the found request through pAllocationRequest.
9245 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
9246 uint32_t currentFrameIndex,
9247 uint32_t frameInUseCount,
9248 VkDeviceSize bufferImageGranularity,
9249 VkDeviceSize allocSize,
9250 VkDeviceSize allocAlignment,
9252 VmaSuballocationType allocType,
9253 bool canMakeOtherLost,
9255 VmaAllocationRequest* pAllocationRequest)
9257 VMA_ASSERT(allocSize > 0);
9258 VMA_ASSERT(!upperAddress);
9259 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9260 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9261 VMA_HEAVY_ASSERT(Validate());
9263 pAllocationRequest->type = VmaAllocationRequestType::Normal;
// Early out: without eviction, there is no hope if total free space
// (including debug margins) cannot cover the request.
9266 if(canMakeOtherLost ==
false &&
9267 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
9273 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
9274 if(freeSuballocCount > 0)
// Best fit: first free range whose size >= request (binary search).
9279 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
9280 m_FreeSuballocationsBySize.data(),
9281 m_FreeSuballocationsBySize.data() + freeSuballocCount,
9282 allocSize + 2 * VMA_DEBUG_MARGIN,
9283 VmaSuballocationItemSizeLess());
9284 size_t index = it - m_FreeSuballocationsBySize.data();
9285 for(; index < freeSuballocCount; ++index)
9290 bufferImageGranularity,
9294 m_FreeSuballocationsBySize[index],
9296 &pAllocationRequest->offset,
9297 &pAllocationRequest->itemsToMakeLostCount,
9298 &pAllocationRequest->sumFreeSize,
9299 &pAllocationRequest->sumItemSize))
9301 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
9306 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
// Min offset: scan suballocations in address order, take first fit.
9308 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9309 it != m_Suballocations.end();
9312 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
9315 bufferImageGranularity,
9321 &pAllocationRequest->offset,
9322 &pAllocationRequest->itemsToMakeLostCount,
9323 &pAllocationRequest->sumFreeSize,
9324 &pAllocationRequest->sumItemSize))
9326 pAllocationRequest->item = it;
// Worst fit: walk the size-sorted vector from largest downwards.
9334 for(
size_t index = freeSuballocCount; index--; )
9339 bufferImageGranularity,
9343 m_FreeSuballocationsBySize[index],
9345 &pAllocationRequest->offset,
9346 &pAllocationRequest->itemsToMakeLostCount,
9347 &pAllocationRequest->sumFreeSize,
9348 &pAllocationRequest->sumItemSize))
9350 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
9357 if(canMakeOtherLost)
// Eviction path: try every position; prefer the cheapest candidate
// by VmaAllocationRequest::CalcCost().
9362 VmaAllocationRequest tmpAllocRequest = {};
9363 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
9364 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
9365 suballocIt != m_Suballocations.end();
9368 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
9369 suballocIt->hAllocation->CanBecomeLost())
9374 bufferImageGranularity,
9380 &tmpAllocRequest.offset,
9381 &tmpAllocRequest.itemsToMakeLostCount,
9382 &tmpAllocRequest.sumFreeSize,
9383 &tmpAllocRequest.sumItemSize))
9387 *pAllocationRequest = tmpAllocRequest;
9388 pAllocationRequest->item = suballocIt;
9391 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
9393 *pAllocationRequest = tmpAllocRequest;
9394 pAllocationRequest->item = suballocIt;
// Evicts the allocations a CreateAllocationRequest marked for losing:
// walks forward from the request item, skipping free ranges, making each
// lost and merging the freed range back, until the required count is met.
9407 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
9408 uint32_t currentFrameIndex,
9409 uint32_t frameInUseCount,
9410 VmaAllocationRequest* pAllocationRequest)
9412 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
9414 while(pAllocationRequest->itemsToMakeLostCount > 0)
9416 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
9418 ++pAllocationRequest->item;
9420 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9421 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
9422 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
9423 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the iterator
// to keep using.
9425 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
9426 --pAllocationRequest->itemsToMakeLostCount;
9434 VMA_HEAVY_ASSERT(Validate());
9435 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9436 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes every lost-able allocation in this block lost; returns how many
// were evicted.
9441 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9443 uint32_t lostAllocationCount = 0;
9444 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9445 it != m_Suballocations.end();
9448 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
9449 it->hAllocation->CanBecomeLost() &&
9450 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9452 it = FreeSuballocation(it);
9453 ++lostAllocationCount;
9456 return lostAllocationCount;
// Scans all used suballocations and verifies the magic values written into
// the VMA_DEBUG_MARGIN guard bytes before and after each allocation.
// Returns VK_ERROR_VALIDATION_FAILED_EXT on the first corruption found.
9459 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
9461 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9462 it != m_Suballocations.end();
9465 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
9467 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
9469 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9470 return VK_ERROR_VALIDATION_FAILED_EXT;
9472 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
9474 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9475 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously found allocation request: converts the chosen free
// suballocation into a used one, and splits off any leftover space before
// (paddingBegin) and after (paddingEnd) as new FREE suballocations that
// are re-registered in the by-size vector. Updates m_FreeCount and
// m_SumFreeSize accordingly.
9483 void VmaBlockMetadata_Generic::Alloc(
9484 const VmaAllocationRequest& request,
9485 VmaSuballocationType type,
9486 VkDeviceSize allocSize,
9489 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
9490 VMA_ASSERT(request.item != m_Suballocations.end());
9491 VmaSuballocation& suballoc = *request.item;
9493 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9495 VMA_ASSERT(request.offset >= suballoc.offset);
9496 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
9497 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
9498 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the free-by-size index before mutating the entry.
9502 UnregisterFreeSuballocation(request.item);
9504 suballoc.offset = request.offset;
9505 suballoc.size = allocSize;
9506 suballoc.type = type;
9507 suballoc.hAllocation = hAllocation;
// Trailing leftover becomes a new free range after the allocation.
9512 VmaSuballocation paddingSuballoc = {};
9513 paddingSuballoc.offset = request.offset + allocSize;
9514 paddingSuballoc.size = paddingEnd;
9515 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9516 VmaSuballocationList::iterator next = request.item;
9518 const VmaSuballocationList::iterator paddingEndItem =
9519 m_Suballocations.insert(next, paddingSuballoc);
9520 RegisterFreeSuballocation(paddingEndItem);
// Leading leftover becomes a new free range before the allocation.
9526 VmaSuballocation paddingSuballoc = {};
9527 paddingSuballoc.offset = request.offset - paddingBegin;
9528 paddingSuballoc.size = paddingBegin;
9529 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9530 const VmaSuballocationList::iterator paddingBeginItem =
9531 m_Suballocations.insert(request.item, paddingSuballoc);
9532 RegisterFreeSuballocation(paddingBeginItem);
9536 m_FreeCount = m_FreeCount - 1;
9537 if(paddingBegin > 0)
9545 m_SumFreeSize -= allocSize;
// Frees a given allocation: linear search for the matching suballocation,
// then FreeSuballocation handles merging; asserts if not found.
9548 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
9550 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9551 suballocItem != m_Suballocations.end();
9554 VmaSuballocation& suballoc = *suballocItem;
9555 if(suballoc.hAllocation == allocation)
9557 FreeSuballocation(suballocItem);
9558 VMA_HEAVY_ASSERT(Validate());
9562 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts at the given byte offset; asserts
// if no suballocation matches.
9565 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
9567 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9568 suballocItem != m_Suballocations.end();
9571 VmaSuballocation& suballoc = *suballocItem;
9572 if(suballoc.offset == offset)
9574 FreeSuballocation(suballocItem);
9578 VMA_ASSERT(0 &&
"Not found!");
// Verifies the free-by-size vector invariants: every entry is FREE, at
// least the registration threshold in size, and sorted ascending.
9581 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const
9583 VkDeviceSize lastSize = 0;
9584 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
9586 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
9588 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
9589 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9590 VMA_VALIDATE(it->size >= lastSize);
9591 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at suballocItem. Computes the final aligned offset
// (*pOffset) accounting for VMA_DEBUG_MARGIN and bufferImageGranularity
// conflicts with neighbors on the same "page". Two major paths:
// - canMakeOtherLost: may span several successive suballocations, counting
//   evictable allocations into *itemsToMakeLostCount / *pSumItemSize;
// - otherwise: suballocItem must be a single FREE range big enough.
// Returns success/failure (return statements lost to extraction).
9596 bool VmaBlockMetadata_Generic::CheckAllocation(
9597 uint32_t currentFrameIndex,
9598 uint32_t frameInUseCount,
9599 VkDeviceSize bufferImageGranularity,
9600 VkDeviceSize allocSize,
9601 VkDeviceSize allocAlignment,
9602 VmaSuballocationType allocType,
9603 VmaSuballocationList::const_iterator suballocItem,
9604 bool canMakeOtherLost,
9605 VkDeviceSize* pOffset,
9606 size_t* itemsToMakeLostCount,
9607 VkDeviceSize* pSumFreeSize,
9608 VkDeviceSize* pSumItemSize)
const
9610 VMA_ASSERT(allocSize > 0);
9611 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9612 VMA_ASSERT(suballocItem != m_Suballocations.cend());
9613 VMA_ASSERT(pOffset != VMA_NULL);
9615 *itemsToMakeLostCount = 0;
9619 if(canMakeOtherLost)
9621 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9623 *pSumFreeSize = suballocItem->size;
// Occupied start item: only usable if its allocation is evictable
// and old enough to be safely made lost.
9627 if(suballocItem->hAllocation->CanBecomeLost() &&
9628 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9630 ++*itemsToMakeLostCount;
9631 *pSumItemSize = suballocItem->size;
9640 if(GetSize() - suballocItem->offset < allocSize)
9646 *pOffset = suballocItem->offset;
// Reserve space for the leading debug-margin guard bytes.
9649 if(VMA_DEBUG_MARGIN > 0)
9651 *pOffset += VMA_DEBUG_MARGIN;
9655 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Respect bufferImageGranularity: if a previous suballocation of a
// conflicting type shares the same page, bump alignment up to it.
9659 if(bufferImageGranularity > 1)
9661 bool bufferImageGranularityConflict =
false;
9662 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
9663 while(prevSuballocItem != m_Suballocations.cbegin())
9666 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
9667 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
9669 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9671 bufferImageGranularityConflict =
true;
9679 if(bufferImageGranularityConflict)
9681 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
9687 if(*pOffset >= suballocItem->offset + suballocItem->size)
9693 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
9696 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
9698 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
9700 if(suballocItem->offset + totalSize > GetSize())
// The request may span multiple successive suballocations; walk
// forward accumulating free space and evictable allocations.
9707 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
9708 if(totalSize > suballocItem->size)
9710 VkDeviceSize remainingSize = totalSize - suballocItem->size;
9711 while(remainingSize > 0)
9714 if(lastSuballocItem == m_Suballocations.cend())
9718 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9720 *pSumFreeSize += lastSuballocItem->size;
9724 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
9725 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
9726 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9728 ++*itemsToMakeLostCount;
9729 *pSumItemSize += lastSuballocItem->size;
9736 remainingSize = (lastSuballocItem->size < remainingSize) ?
9737 remainingSize - lastSuballocItem->size : 0;
// Check granularity conflicts with following suballocations too;
// a conflicting follower must itself be evictable.
9743 if(bufferImageGranularity > 1)
9745 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
9747 while(nextSuballocItem != m_Suballocations.cend())
9749 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
9750 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9752 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9754 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
9755 if(nextSuballoc.hAllocation->CanBecomeLost() &&
9756 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9758 ++*itemsToMakeLostCount;
// Non-eviction path: the start item must already be FREE.
9777 const VmaSuballocation& suballoc = *suballocItem;
9778 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9780 *pSumFreeSize = suballoc.size;
9783 if(suballoc.size < allocSize)
9789 *pOffset = suballoc.offset;
9792 if(VMA_DEBUG_MARGIN > 0)
9794 *pOffset += VMA_DEBUG_MARGIN;
9798 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same granularity conflict handling as above, against predecessors.
9802 if(bufferImageGranularity > 1)
9804 bool bufferImageGranularityConflict =
false;
9805 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
9806 while(prevSuballocItem != m_Suballocations.cbegin())
9809 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
9810 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
9812 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9814 bufferImageGranularityConflict =
true;
9822 if(bufferImageGranularityConflict)
9824 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
9829 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
9832 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fail if alignment padding + request + end margin exceed the range.
9835 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
9842 if(bufferImageGranularity > 1)
9844 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
9846 while(nextSuballocItem != m_Suballocations.cend())
9848 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
9849 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9851 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges a free suballocation with its (also free) successor: absorbs the
// successor's size and erases it from the list.
9870 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
9872 VMA_ASSERT(item != m_Suballocations.end());
9873 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9875 VmaSuballocationList::iterator nextItem = item;
9877 VMA_ASSERT(nextItem != m_Suballocations.end());
9878 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
9880 item->size += nextItem->size;
9882 m_Suballocations.erase(nextItem);
// Turns a used suballocation into a free one, coalescing it with free
// neighbors on either side. Neighbors are first unregistered from the
// by-size vector, merged, then the surviving range is re-registered.
// Returns the iterator of the resulting free range.
9885 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
9888 VmaSuballocation& suballoc = *suballocItem;
9889 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9890 suballoc.hAllocation = VK_NULL_HANDLE;
9894 m_SumFreeSize += suballoc.size;
// Determine which neighbors are free and therefore mergeable.
9897 bool mergeWithNext =
false;
9898 bool mergeWithPrev =
false;
9900 VmaSuballocationList::iterator nextItem = suballocItem;
9902 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
9904 mergeWithNext =
true;
9907 VmaSuballocationList::iterator prevItem = suballocItem;
9908 if(suballocItem != m_Suballocations.begin())
9911 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9913 mergeWithPrev =
true;
9919 UnregisterFreeSuballocation(nextItem);
9920 MergeFreeWithNext(suballocItem);
9925 UnregisterFreeSuballocation(prevItem);
9926 MergeFreeWithNext(prevItem);
9927 RegisterFreeSuballocation(prevItem);
9932 RegisterFreeSuballocation(suballocItem);
9933 return suballocItem;
// Inserts a free range into the size-sorted vector, but only if it meets
// the minimum size threshold (smaller ranges are not indexed).
9937 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
9939 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9940 VMA_ASSERT(item->size > 0);
9944 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
9946 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9948 if(m_FreeSuballocationsBySize.empty())
9950 m_FreeSuballocationsBySize.push_back(item);
9954 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free range from the size-sorted vector: binary-search to the
// first entry of equal size, then scan forward for the exact iterator
// (multiple ranges may share a size). Asserts if it is not found.
9962 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
9964 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9965 VMA_ASSERT(item->size > 0);
9969 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
9971 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9973 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
9974 m_FreeSuballocationsBySize.data(),
9975 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
9977 VmaSuballocationItemSizeLess());
9978 for(
size_t index = it - m_FreeSuballocationsBySize.data();
9979 index < m_FreeSuballocationsBySize.size();
9982 if(m_FreeSuballocationsBySize[index] == item)
9984 VmaVectorRemove(m_FreeSuballocationsBySize, index);
9987 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
9989 VMA_ASSERT(0 &&
"Not found.");
9995 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
9996 VkDeviceSize bufferImageGranularity,
9997 VmaSuballocationType& inOutPrevSuballocType)
const
9999 if(bufferImageGranularity == 1 || IsEmpty())
10004 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
10005 bool typeConflictFound =
false;
10006 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
10007 it != m_Suballocations.cend();
10010 const VmaSuballocationType suballocType = it->type;
10011 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
10013 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
10014 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
10016 typeConflictFound =
true;
10018 inOutPrevSuballocType = suballocType;
10022 return typeConflictFound || minAlignment >= bufferImageGranularity;
10028 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
10029 VmaBlockMetadata(hAllocator),
10031 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10032 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10033 m_1stVectorIndex(0),
10034 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
10035 m_1stNullItemsBeginCount(0),
10036 m_1stNullItemsMiddleCount(0),
10037 m_2ndNullItemsCount(0)
10041 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
10045 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
10047 VmaBlockMetadata::Init(size);
10048 m_SumFreeSize = size;
// Consistency check of the linear metadata: verifies the invariants of the
// 1st/2nd suballocation vectors, null-item counters, offset monotonicity,
// and that m_SumFreeSize matches block size minus the sum of used sizes.
// NOTE(review): original line numbers jump (e.g. 10059 -> 10061); brace-only
// and some interior lines appear to have been dropped from this view — left
// byte-identical rather than restyled.
10051 bool VmaBlockMetadata_Linear::Validate()
const
10053 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10054 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is non-empty exactly when a second-vector mode is active.
10056 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
10057 VMA_VALIDATE(!suballocations1st.empty() ||
10058 suballocations2nd.empty() ||
10059 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
// Null items may exist only at the beginning/middle of 1st, never at its ends.
10061 if(!suballocations1st.empty())
10064 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
10066 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
10068 if(!suballocations2nd.empty())
10071 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
10074 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
10075 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
10077 VkDeviceSize sumUsedSize = 0;
10078 const size_t suballoc1stCount = suballocations1st.size();
10079 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies the low end of the block; walk it
// first, checking offsets are non-decreasing and allocations self-consistent.
10081 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10083 const size_t suballoc2ndCount = suballocations2nd.size();
10084 size_t nullItem2ndCount = 0;
10085 for(
size_t i = 0; i < suballoc2ndCount; ++i)
10087 const VmaSuballocation& suballoc = suballocations2nd[i];
10088 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10090 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10091 VMA_VALIDATE(suballoc.offset >= offset);
10095 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10096 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10097 sumUsedSize += suballoc.size;
10101 ++nullItem2ndCount;
10104 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10107 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// The leading m_1stNullItemsBeginCount entries of 1st must all be null/free.
10110 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
10112 const VmaSuballocation& suballoc = suballocations1st[i];
10113 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
10114 suballoc.hAllocation == VK_NULL_HANDLE);
10117 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Walk the rest of the 1st vector.
10119 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
10121 const VmaSuballocation& suballoc = suballocations1st[i];
10122 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10124 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10125 VMA_VALIDATE(suballoc.offset >= offset);
10126 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
10130 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10131 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10132 sumUsedSize += suballoc.size;
10136 ++nullItem1stCount;
10139 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10141 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows downward from the top; iterate in reverse.
10143 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10145 const size_t suballoc2ndCount = suballocations2nd.size();
10146 size_t nullItem2ndCount = 0;
10147 for(
size_t i = suballoc2ndCount; i--; )
10149 const VmaSuballocation& suballoc = suballocations2nd[i];
10150 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10152 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10153 VMA_VALIDATE(suballoc.offset >= offset);
10157 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10158 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10159 sumUsedSize += suballoc.size;
10163 ++nullItem2ndCount;
10166 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10169 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Final global invariants.
10172 VMA_VALIDATE(offset <= GetSize());
10173 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
10178 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const
10180 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
10181 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
10184 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const
10186 const VkDeviceSize size = GetSize();
10198 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10200 switch(m_2ndVectorMode)
10202 case SECOND_VECTOR_EMPTY:
10208 const size_t suballocations1stCount = suballocations1st.size();
10209 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
10210 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10211 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
10213 firstSuballoc.offset,
10214 size - (lastSuballoc.offset + lastSuballoc.size));
10218 case SECOND_VECTOR_RING_BUFFER:
10223 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10224 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
10225 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
10226 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
10230 case SECOND_VECTOR_DOUBLE_STACK:
10235 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10236 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
10237 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
10238 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with per-block statistics by sweeping the block from offset 0
// upward, visiting: (1) the 2nd vector in ring-buffer mode, (2) the 1st
// vector, (3) the 2nd vector in double-stack mode (top of block, reverse).
// NOTE(review): original line numbers jump (e.g. 10289 -> 10303); the stat
// accumulation statements inside the gaps are not visible here — left
// byte-identical rather than restyled.
10248 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
10250 const VkDeviceSize size = GetSize();
10251 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10252 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10253 const size_t suballoc1stCount = suballocations1st.size();
10254 const size_t suballoc2ndCount = suballocations2nd.size();
// lastOffset tracks the end of the last range processed during the sweep.
10265 VkDeviceSize lastOffset = 0;
// Phase 1: 2nd vector at the low end of the block (ring-buffer mode only).
10267 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10269 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10270 size_t nextAlloc2ndIndex = 0;
10271 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip over null (freed) items.
10274 while(nextAlloc2ndIndex < suballoc2ndCount &&
10275 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10277 ++nextAlloc2ndIndex;
10281 if(nextAlloc2ndIndex < suballoc2ndCount)
10283 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Free gap between the previous range and this allocation.
10286 if(lastOffset < suballoc.offset)
10289 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10303 lastOffset = suballoc.offset + suballoc.size;
10304 ++nextAlloc2ndIndex;
// No more live allocations in 2nd; account the trailing gap.
10310 if(lastOffset < freeSpace2ndTo1stEnd)
10312 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10320 lastOffset = freeSpace2ndTo1stEnd;
// Phase 2: the 1st vector, ending at top of block or bottom of 2nd stack.
10325 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10326 const VkDeviceSize freeSpace1stTo2ndEnd =
10327 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10328 while(lastOffset < freeSpace1stTo2ndEnd)
10331 while(nextAlloc1stIndex < suballoc1stCount &&
10332 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10334 ++nextAlloc1stIndex;
10338 if(nextAlloc1stIndex < suballoc1stCount)
10340 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10343 if(lastOffset < suballoc.offset)
10346 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10360 lastOffset = suballoc.offset + suballoc.size;
10361 ++nextAlloc1stIndex;
10367 if(lastOffset < freeSpace1stTo2ndEnd)
10369 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10377 lastOffset = freeSpace1stTo2ndEnd;
// Phase 3: 2nd vector as a stack at the top of the block, iterated in reverse.
10381 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10383 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10384 while(lastOffset < size)
10387 while(nextAlloc2ndIndex != SIZE_MAX &&
10388 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10390 --nextAlloc2ndIndex;
10394 if(nextAlloc2ndIndex != SIZE_MAX)
10396 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10399 if(lastOffset < suballoc.offset)
10402 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10416 lastOffset = suballoc.offset + suballoc.size;
10417 --nextAlloc2ndIndex;
10423 if(lastOffset < size)
10425 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's usage into inoutStats, using the same three-phase
// sweep as CalcAllocationStatInfo.
// FIX: in the ring-buffer phase the scan of the 2nd vector previously started
// at m_1stNullItemsBeginCount — an index that belongs to the 1st vector. The
// analogous loops in CalcAllocationStatInfo and PrintDetailedMap start at 0;
// starting mid-vector could skip live 2nd-vector allocations and under-count.
// Only that initializer is changed; all other statements are untouched.
10441 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const
10443 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10444 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10445 const VkDeviceSize size = GetSize();
10446 const size_t suballoc1stCount = suballocations1st.size();
10447 const size_t suballoc2ndCount = suballocations2nd.size();
10449 inoutStats.
size += size;
10451 VkDeviceSize lastOffset = 0;
// Phase 1: 2nd vector at the low end of the block (ring-buffer mode only).
10453 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10455 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10456 size_t nextAlloc2ndIndex = 0;
10457 while(lastOffset < freeSpace2ndTo1stEnd)
10460 while(nextAlloc2ndIndex < suballoc2ndCount &&
10461 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10463 ++nextAlloc2ndIndex;
10467 if(nextAlloc2ndIndex < suballoc2ndCount)
10469 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10472 if(lastOffset < suballoc.offset)
10475 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10486 lastOffset = suballoc.offset + suballoc.size;
10487 ++nextAlloc2ndIndex;
10492 if(lastOffset < freeSpace2ndTo1stEnd)
10495 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10502 lastOffset = freeSpace2ndTo1stEnd;
// Phase 2: the 1st vector, ending at top of block or bottom of 2nd stack.
10507 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10508 const VkDeviceSize freeSpace1stTo2ndEnd =
10509 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10510 while(lastOffset < freeSpace1stTo2ndEnd)
10513 while(nextAlloc1stIndex < suballoc1stCount &&
10514 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10516 ++nextAlloc1stIndex;
10520 if(nextAlloc1stIndex < suballoc1stCount)
10522 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10525 if(lastOffset < suballoc.offset)
10528 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10539 lastOffset = suballoc.offset + suballoc.size;
10540 ++nextAlloc1stIndex;
10545 if(lastOffset < freeSpace1stTo2ndEnd)
10548 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10555 lastOffset = freeSpace1stTo2ndEnd;
// Phase 3: 2nd vector as a stack at the top of the block, iterated in reverse.
10559 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10561 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10562 while(lastOffset < size)
10565 while(nextAlloc2ndIndex != SIZE_MAX &&
10566 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10568 --nextAlloc2ndIndex;
10572 if(nextAlloc2ndIndex != SIZE_MAX)
10574 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10577 if(lastOffset < suballoc.offset)
10580 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10591 lastOffset = suballoc.offset + suballoc.size;
10592 --nextAlloc2ndIndex;
10597 if(lastOffset < size)
10600 const VkDeviceSize unusedRangeSize = size - lastOffset;
10613 #if VMA_STATS_STRING_ENABLED
// Writes a detailed JSON map of this block. Two passes over the same
// three-phase sweep: the first pass only counts allocations/unused ranges
// and used bytes (needed up front by PrintDetailedMap_Begin); the second
// pass emits each allocation and unused range.
// NOTE(review): original line numbers jump; brace-only and some interior
// lines are not visible in this view — left byte-identical.
10614 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const
10616 const VkDeviceSize size = GetSize();
10617 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10618 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10619 const size_t suballoc1stCount = suballocations1st.size();
10620 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: count only.
10624 size_t unusedRangeCount = 0;
10625 VkDeviceSize usedBytes = 0;
10627 VkDeviceSize lastOffset = 0;
10629 size_t alloc2ndCount = 0;
10630 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10632 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10633 size_t nextAlloc2ndIndex = 0;
10634 while(lastOffset < freeSpace2ndTo1stEnd)
10637 while(nextAlloc2ndIndex < suballoc2ndCount &&
10638 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10640 ++nextAlloc2ndIndex;
10644 if(nextAlloc2ndIndex < suballoc2ndCount)
10646 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10649 if(lastOffset < suballoc.offset)
10652 ++unusedRangeCount;
10658 usedBytes += suballoc.size;
10661 lastOffset = suballoc.offset + suballoc.size;
10662 ++nextAlloc2ndIndex;
10667 if(lastOffset < freeSpace2ndTo1stEnd)
10670 ++unusedRangeCount;
10674 lastOffset = freeSpace2ndTo1stEnd;
10679 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10680 size_t alloc1stCount = 0;
10681 const VkDeviceSize freeSpace1stTo2ndEnd =
10682 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10683 while(lastOffset < freeSpace1stTo2ndEnd)
10686 while(nextAlloc1stIndex < suballoc1stCount &&
10687 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10689 ++nextAlloc1stIndex;
10693 if(nextAlloc1stIndex < suballoc1stCount)
10695 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10698 if(lastOffset < suballoc.offset)
10701 ++unusedRangeCount;
10707 usedBytes += suballoc.size;
10710 lastOffset = suballoc.offset + suballoc.size;
10711 ++nextAlloc1stIndex;
10716 if(lastOffset < size)
10719 ++unusedRangeCount;
10723 lastOffset = freeSpace1stTo2ndEnd;
10727 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10729 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10730 while(lastOffset < size)
10733 while(nextAlloc2ndIndex != SIZE_MAX &&
10734 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10736 --nextAlloc2ndIndex;
10740 if(nextAlloc2ndIndex != SIZE_MAX)
10742 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10745 if(lastOffset < suballoc.offset)
10748 ++unusedRangeCount;
10754 usedBytes += suballoc.size;
10757 lastOffset = suballoc.offset + suballoc.size;
10758 --nextAlloc2ndIndex;
10763 if(lastOffset < size)
10766 ++unusedRangeCount;
// SECOND PASS: emit JSON using the counts gathered above.
10775 const VkDeviceSize unusedBytes = size - usedBytes;
10776 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
10781 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10783 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10784 size_t nextAlloc2ndIndex = 0;
10785 while(lastOffset < freeSpace2ndTo1stEnd)
10788 while(nextAlloc2ndIndex < suballoc2ndCount &&
10789 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10791 ++nextAlloc2ndIndex;
10795 if(nextAlloc2ndIndex < suballoc2ndCount)
10797 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10800 if(lastOffset < suballoc.offset)
10803 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10804 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10809 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10812 lastOffset = suballoc.offset + suballoc.size;
10813 ++nextAlloc2ndIndex;
10818 if(lastOffset < freeSpace2ndTo1stEnd)
10821 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10822 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10826 lastOffset = freeSpace2ndTo1stEnd;
10831 nextAlloc1stIndex = m_1stNullItemsBeginCount;
10832 while(lastOffset < freeSpace1stTo2ndEnd)
10835 while(nextAlloc1stIndex < suballoc1stCount &&
10836 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10838 ++nextAlloc1stIndex;
10842 if(nextAlloc1stIndex < suballoc1stCount)
10844 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10847 if(lastOffset < suballoc.offset)
10850 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10851 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10856 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10859 lastOffset = suballoc.offset + suballoc.size;
10860 ++nextAlloc1stIndex;
10865 if(lastOffset < freeSpace1stTo2ndEnd)
10868 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10869 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10873 lastOffset = freeSpace1stTo2ndEnd;
10877 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10879 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10880 while(lastOffset < size)
10883 while(nextAlloc2ndIndex != SIZE_MAX &&
10884 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10886 --nextAlloc2ndIndex;
10890 if(nextAlloc2ndIndex != SIZE_MAX)
10892 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10895 if(lastOffset < suballoc.offset)
10898 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10899 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10904 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10907 lastOffset = suballoc.offset + suballoc.size;
10908 --nextAlloc2ndIndex;
10913 if(lastOffset < size)
10916 const VkDeviceSize unusedRangeSize = size - lastOffset;
10917 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10926 PrintDetailedMap_End(json);
10928 #endif // #if VMA_STATS_STRING_ENABLED
10930 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
10931 uint32_t currentFrameIndex,
10932 uint32_t frameInUseCount,
10933 VkDeviceSize bufferImageGranularity,
10934 VkDeviceSize allocSize,
10935 VkDeviceSize allocAlignment,
10937 VmaSuballocationType allocType,
10938 bool canMakeOtherLost,
10940 VmaAllocationRequest* pAllocationRequest)
10942 VMA_ASSERT(allocSize > 0);
10943 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
10944 VMA_ASSERT(pAllocationRequest != VMA_NULL);
10945 VMA_HEAVY_ASSERT(Validate());
10946 return upperAddress ?
10947 CreateAllocationRequest_UpperAddress(
10948 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10949 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
10950 CreateAllocationRequest_LowerAddress(
10951 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10952 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
10955 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
10956 uint32_t currentFrameIndex,
10957 uint32_t frameInUseCount,
10958 VkDeviceSize bufferImageGranularity,
10959 VkDeviceSize allocSize,
10960 VkDeviceSize allocAlignment,
10961 VmaSuballocationType allocType,
10962 bool canMakeOtherLost,
10964 VmaAllocationRequest* pAllocationRequest)
10966 const VkDeviceSize size = GetSize();
10967 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10968 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10970 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10972 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
10977 if(allocSize > size)
10981 VkDeviceSize resultBaseOffset = size - allocSize;
10982 if(!suballocations2nd.empty())
10984 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10985 resultBaseOffset = lastSuballoc.offset - allocSize;
10986 if(allocSize > lastSuballoc.offset)
10993 VkDeviceSize resultOffset = resultBaseOffset;
10996 if(VMA_DEBUG_MARGIN > 0)
10998 if(resultOffset < VMA_DEBUG_MARGIN)
11002 resultOffset -= VMA_DEBUG_MARGIN;
11006 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
11010 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
11012 bool bufferImageGranularityConflict =
false;
11013 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11015 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11016 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11018 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
11020 bufferImageGranularityConflict =
true;
11028 if(bufferImageGranularityConflict)
11030 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
11035 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
11036 suballocations1st.back().offset + suballocations1st.back().size :
11038 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
11042 if(bufferImageGranularity > 1)
11044 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11046 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11047 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11049 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
11063 pAllocationRequest->offset = resultOffset;
11064 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
11065 pAllocationRequest->sumItemSize = 0;
11067 pAllocationRequest->itemsToMakeLostCount = 0;
11068 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to place an allocation growing upward: first after the end of the 1st
// vector (EMPTY or DOUBLE_STACK modes), then — wrapping around as a ring
// buffer — after the end of the 2nd vector, optionally making existing
// allocations lost to free up room.
// NOTE(review): original line numbers jump (e.g. 11255-11262, 11279-11294);
// brace-only lines and several statements in those gaps are not visible in
// this view — left byte-identical rather than restyled.
11075 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
11076 uint32_t currentFrameIndex,
11077 uint32_t frameInUseCount,
11078 VkDeviceSize bufferImageGranularity,
11079 VkDeviceSize allocSize,
11080 VkDeviceSize allocAlignment,
11081 VmaSuballocationType allocType,
11082 bool canMakeOtherLost,
11084 VmaAllocationRequest* pAllocationRequest)
11086 const VkDeviceSize size = GetSize();
11087 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11088 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Case 1: try to allocate at the end of the 1st vector.
11090 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11094 VkDeviceSize resultBaseOffset = 0;
11095 if(!suballocations1st.empty())
11097 const VmaSuballocation& lastSuballoc = suballocations1st.back();
11098 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11102 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin before the allocation.
11105 if(VMA_DEBUG_MARGIN > 0)
11107 resultOffset += VMA_DEBUG_MARGIN;
11111 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Check previous suballocations (end of 1st) for granularity conflicts;
// if found, bump alignment up to the granularity.
11115 if(bufferImageGranularity > 1 && !suballocations1st.empty())
11117 bool bufferImageGranularityConflict =
false;
11118 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11120 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11121 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11123 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11125 bufferImageGranularityConflict =
true;
11133 if(bufferImageGranularityConflict)
11135 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the 2nd stack (double stack) or block end.
11139 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
11140 suballocations2nd.back().offset : size;
11143 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Check next suballocations (bottom of 2nd stack) for granularity conflicts.
11147 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11149 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11151 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11152 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11154 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: request placed at end of 1st vector.
11168 pAllocationRequest->offset = resultOffset;
11169 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
11170 pAllocationRequest->sumItemSize = 0;
11172 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
11173 pAllocationRequest->itemsToMakeLostCount = 0;
// Case 2: wrap around — allocate at the end of the 2nd vector (ring buffer).
11180 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11182 VMA_ASSERT(!suballocations1st.empty());
11184 VkDeviceSize resultBaseOffset = 0;
11185 if(!suballocations2nd.empty())
11187 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11188 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11192 VkDeviceSize resultOffset = resultBaseOffset;
11195 if(VMA_DEBUG_MARGIN > 0)
11197 resultOffset += VMA_DEBUG_MARGIN;
11201 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
11205 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
11207 bool bufferImageGranularityConflict =
false;
11208 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
11210 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
11211 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11213 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11215 bufferImageGranularityConflict =
true;
11223 if(bufferImageGranularityConflict)
11225 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
11229 pAllocationRequest->itemsToMakeLostCount = 0;
11230 pAllocationRequest->sumItemSize = 0;
11231 size_t index1st = m_1stNullItemsBeginCount;
// Optionally make 1st-vector allocations that overlap the candidate range lost.
11233 if(canMakeOtherLost)
11235 while(index1st < suballocations1st.size() &&
11236 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
11239 const VmaSuballocation& suballoc = suballocations1st[index1st];
11240 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
11246 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11247 if(suballoc.hAllocation->CanBecomeLost() &&
11248 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11250 ++pAllocationRequest->itemsToMakeLostCount;
11251 pAllocationRequest->sumItemSize += suballoc.size;
// Also make lost any following 1st-vector allocation that would share a
// granularity page with the candidate range.
11263 if(bufferImageGranularity > 1)
11265 while(index1st < suballocations1st.size())
11267 const VmaSuballocation& suballoc = suballocations1st[index1st];
11268 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
11270 if(suballoc.hAllocation != VK_NULL_HANDLE)
11273 if(suballoc.hAllocation->CanBecomeLost() &&
11274 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11276 ++pAllocationRequest->itemsToMakeLostCount;
11277 pAllocationRequest->sumItemSize += suballoc.size;
// Special case: the 2nd vector would collide with the end of the block.
11295 if(index1st == suballocations1st.size() &&
11296 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
11299 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// Success if the range fits before the end of block or before the next
// surviving 1st-vector allocation.
11304 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
11305 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
11309 if(bufferImageGranularity > 1)
11311 for(
size_t nextSuballocIndex = index1st;
11312 nextSuballocIndex < suballocations1st.size();
11313 nextSuballocIndex++)
11315 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
11316 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11318 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
11332 pAllocationRequest->offset = resultOffset;
11333 pAllocationRequest->sumFreeSize =
11334 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
11336 - pAllocationRequest->sumItemSize;
11337 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
11346 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
11347 uint32_t currentFrameIndex,
11348 uint32_t frameInUseCount,
11349 VmaAllocationRequest* pAllocationRequest)
11351 if(pAllocationRequest->itemsToMakeLostCount == 0)
11356 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
11359 SuballocationVectorType* suballocations = &AccessSuballocations1st();
11360 size_t index = m_1stNullItemsBeginCount;
11361 size_t madeLostCount = 0;
11362 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
11364 if(index == suballocations->size())
11368 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11370 suballocations = &AccessSuballocations2nd();
11374 VMA_ASSERT(!suballocations->empty());
11376 VmaSuballocation& suballoc = (*suballocations)[index];
11377 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11379 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11380 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
11381 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11383 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11384 suballoc.hAllocation = VK_NULL_HANDLE;
11385 m_SumFreeSize += suballoc.size;
11386 if(suballocations == &AccessSuballocations1st())
11388 ++m_1stNullItemsMiddleCount;
11392 ++m_2ndNullItemsCount;
11404 CleanupAfterFree();
11410 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
11412 uint32_t lostAllocationCount = 0;
11414 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11415 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11417 VmaSuballocation& suballoc = suballocations1st[i];
11418 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11419 suballoc.hAllocation->CanBecomeLost() &&
11420 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11422 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11423 suballoc.hAllocation = VK_NULL_HANDLE;
11424 ++m_1stNullItemsMiddleCount;
11425 m_SumFreeSize += suballoc.size;
11426 ++lostAllocationCount;
11430 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11431 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11433 VmaSuballocation& suballoc = suballocations2nd[i];
11434 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11435 suballoc.hAllocation->CanBecomeLost() &&
11436 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11438 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11439 suballoc.hAllocation = VK_NULL_HANDLE;
11440 ++m_2ndNullItemsCount;
11441 m_SumFreeSize += suballoc.size;
11442 ++lostAllocationCount;
11446 if(lostAllocationCount)
11448 CleanupAfterFree();
11451 return lostAllocationCount;
11454 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
11456 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11457 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11459 const VmaSuballocation& suballoc = suballocations1st[i];
11460 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11462 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11464 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11465 return VK_ERROR_VALIDATION_FAILED_EXT;
11467 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11469 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11470 return VK_ERROR_VALIDATION_FAILED_EXT;
11475 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11476 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11478 const VmaSuballocation& suballoc = suballocations2nd[i];
11479 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11481 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11483 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11484 return VK_ERROR_VALIDATION_FAILED_EXT;
11486 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11488 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11489 return VK_ERROR_VALIDATION_FAILED_EXT;
11497 void VmaBlockMetadata_Linear::Alloc(
11498 const VmaAllocationRequest& request,
11499 VmaSuballocationType type,
11500 VkDeviceSize allocSize,
11503 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
11505 switch(request.type)
11507 case VmaAllocationRequestType::UpperAddress:
11509 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
11510 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
11511 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11512 suballocations2nd.push_back(newSuballoc);
11513 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
11516 case VmaAllocationRequestType::EndOf1st:
11518 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11520 VMA_ASSERT(suballocations1st.empty() ||
11521 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
11523 VMA_ASSERT(request.offset + allocSize <= GetSize());
11525 suballocations1st.push_back(newSuballoc);
11528 case VmaAllocationRequestType::EndOf2nd:
11530 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11532 VMA_ASSERT(!suballocations1st.empty() &&
11533 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
11534 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11536 switch(m_2ndVectorMode)
11538 case SECOND_VECTOR_EMPTY:
11540 VMA_ASSERT(suballocations2nd.empty());
11541 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
11543 case SECOND_VECTOR_RING_BUFFER:
11545 VMA_ASSERT(!suballocations2nd.empty());
11547 case SECOND_VECTOR_DOUBLE_STACK:
11548 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
11554 suballocations2nd.push_back(newSuballoc);
11558 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
11561 m_SumFreeSize -= newSuballoc.size;
11564 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
11566 FreeAtOffset(allocation->GetOffset());
11569 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
11571 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11572 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11574 if(!suballocations1st.empty())
11577 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
11578 if(firstSuballoc.offset == offset)
11580 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11581 firstSuballoc.hAllocation = VK_NULL_HANDLE;
11582 m_SumFreeSize += firstSuballoc.size;
11583 ++m_1stNullItemsBeginCount;
11584 CleanupAfterFree();
11590 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
11591 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11593 VmaSuballocation& lastSuballoc = suballocations2nd.back();
11594 if(lastSuballoc.offset == offset)
11596 m_SumFreeSize += lastSuballoc.size;
11597 suballocations2nd.pop_back();
11598 CleanupAfterFree();
11603 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
11605 VmaSuballocation& lastSuballoc = suballocations1st.back();
11606 if(lastSuballoc.offset == offset)
11608 m_SumFreeSize += lastSuballoc.size;
11609 suballocations1st.pop_back();
11610 CleanupAfterFree();
11617 VmaSuballocation refSuballoc;
11618 refSuballoc.offset = offset;
11620 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
11621 suballocations1st.begin() + m_1stNullItemsBeginCount,
11622 suballocations1st.end(),
11624 VmaSuballocationOffsetLess());
11625 if(it != suballocations1st.end())
11627 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11628 it->hAllocation = VK_NULL_HANDLE;
11629 ++m_1stNullItemsMiddleCount;
11630 m_SumFreeSize += it->size;
11631 CleanupAfterFree();
11636 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
11639 VmaSuballocation refSuballoc;
11640 refSuballoc.offset = offset;
11642 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
11643 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
11644 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
11645 if(it != suballocations2nd.end())
11647 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11648 it->hAllocation = VK_NULL_HANDLE;
11649 ++m_2ndNullItemsCount;
11650 m_SumFreeSize += it->size;
11651 CleanupAfterFree();
11656 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
11659 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const
11661 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11662 const size_t suballocCount = AccessSuballocations1st().size();
11663 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
11666 void VmaBlockMetadata_Linear::CleanupAfterFree()
11668 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11669 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11673 suballocations1st.clear();
11674 suballocations2nd.clear();
11675 m_1stNullItemsBeginCount = 0;
11676 m_1stNullItemsMiddleCount = 0;
11677 m_2ndNullItemsCount = 0;
11678 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11682 const size_t suballoc1stCount = suballocations1st.size();
11683 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11684 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
11687 while(m_1stNullItemsBeginCount < suballoc1stCount &&
11688 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
11690 ++m_1stNullItemsBeginCount;
11691 --m_1stNullItemsMiddleCount;
11695 while(m_1stNullItemsMiddleCount > 0 &&
11696 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
11698 --m_1stNullItemsMiddleCount;
11699 suballocations1st.pop_back();
11703 while(m_2ndNullItemsCount > 0 &&
11704 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
11706 --m_2ndNullItemsCount;
11707 suballocations2nd.pop_back();
11711 while(m_2ndNullItemsCount > 0 &&
11712 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
11714 --m_2ndNullItemsCount;
11715 VmaVectorRemove(suballocations2nd, 0);
11718 if(ShouldCompact1st())
11720 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
11721 size_t srcIndex = m_1stNullItemsBeginCount;
11722 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
11724 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
11728 if(dstIndex != srcIndex)
11730 suballocations1st[dstIndex] = suballocations1st[srcIndex];
11734 suballocations1st.resize(nonNullItemCount);
11735 m_1stNullItemsBeginCount = 0;
11736 m_1stNullItemsMiddleCount = 0;
11740 if(suballocations2nd.empty())
11742 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11746 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
11748 suballocations1st.clear();
11749 m_1stNullItemsBeginCount = 0;
11751 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11754 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11755 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
11756 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
11757 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
11759 ++m_1stNullItemsBeginCount;
11760 --m_1stNullItemsMiddleCount;
11762 m_2ndNullItemsCount = 0;
11763 m_1stVectorIndex ^= 1;
11768 VMA_HEAVY_ASSERT(Validate());
11775 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
11776 VmaBlockMetadata(hAllocator),
11778 m_AllocationCount(0),
11782 memset(m_FreeList, 0,
sizeof(m_FreeList));
11785 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
11787 DeleteNode(m_Root);
11790 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
11792 VmaBlockMetadata::Init(size);
11794 m_UsableSize = VmaPrevPow2(size);
11795 m_SumFreeSize = m_UsableSize;
11799 while(m_LevelCount < MAX_LEVELS &&
11800 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
11805 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
11806 rootNode->offset = 0;
11807 rootNode->type = Node::TYPE_FREE;
11808 rootNode->parent = VMA_NULL;
11809 rootNode->buddy = VMA_NULL;
11812 AddToFreeListFront(0, rootNode);
11815 bool VmaBlockMetadata_Buddy::Validate()
const
11818 ValidationContext ctx;
11819 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
11821 VMA_VALIDATE(
false &&
"ValidateNode failed.");
11823 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
11824 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
11827 for(uint32_t level = 0; level < m_LevelCount; ++level)
11829 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
11830 m_FreeList[level].front->free.prev == VMA_NULL);
11832 for(Node* node = m_FreeList[level].front;
11834 node = node->free.next)
11836 VMA_VALIDATE(node->type == Node::TYPE_FREE);
11838 if(node->free.next == VMA_NULL)
11840 VMA_VALIDATE(m_FreeList[level].back == node);
11844 VMA_VALIDATE(node->free.next->free.prev == node);
11850 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
11852 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
11858 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const
11860 for(uint32_t level = 0; level < m_LevelCount; ++level)
11862 if(m_FreeList[level].front != VMA_NULL)
11864 return LevelToNodeSize(level);
11870 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
11872 const VkDeviceSize unusableSize = GetUnusableSize();
11883 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
11885 if(unusableSize > 0)
11894 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const
11896 const VkDeviceSize unusableSize = GetUnusableSize();
11898 inoutStats.
size += GetSize();
11899 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
11904 if(unusableSize > 0)
#if VMA_STATS_STRING_ENABLED

// Writes a detailed JSON map of this block: header stats, the whole buddy
// tree, and the unusable tail as one unused range.
// Fix(review): restored the `VmaStatInfo stat;` local and the dropped call
// arguments per upstream VMA — confirm argument order against
// PrintDetailedMap_Begin's declaration in this project.
void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    // TODO optimize
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
11940 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
11941 uint32_t currentFrameIndex,
11942 uint32_t frameInUseCount,
11943 VkDeviceSize bufferImageGranularity,
11944 VkDeviceSize allocSize,
11945 VkDeviceSize allocAlignment,
11947 VmaSuballocationType allocType,
11948 bool canMakeOtherLost,
11950 VmaAllocationRequest* pAllocationRequest)
11952 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
11956 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
11957 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
11958 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
11960 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
11961 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
11964 if(allocSize > m_UsableSize)
11969 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
11970 for(uint32_t level = targetLevel + 1; level--; )
11972 for(Node* freeNode = m_FreeList[level].front;
11973 freeNode != VMA_NULL;
11974 freeNode = freeNode->free.next)
11976 if(freeNode->offset % allocAlignment == 0)
11978 pAllocationRequest->type = VmaAllocationRequestType::Normal;
11979 pAllocationRequest->offset = freeNode->offset;
11980 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
11981 pAllocationRequest->sumItemSize = 0;
11982 pAllocationRequest->itemsToMakeLostCount = 0;
11983 pAllocationRequest->customData = (
void*)(uintptr_t)level;
11992 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
11993 uint32_t currentFrameIndex,
11994 uint32_t frameInUseCount,
11995 VmaAllocationRequest* pAllocationRequest)
12001 return pAllocationRequest->itemsToMakeLostCount == 0;
12004 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
12013 void VmaBlockMetadata_Buddy::Alloc(
12014 const VmaAllocationRequest& request,
12015 VmaSuballocationType type,
12016 VkDeviceSize allocSize,
12019 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
12021 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
12022 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
12024 Node* currNode = m_FreeList[currLevel].front;
12025 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
12026 while(currNode->offset != request.offset)
12028 currNode = currNode->free.next;
12029 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
12033 while(currLevel < targetLevel)
12037 RemoveFromFreeList(currLevel, currNode);
12039 const uint32_t childrenLevel = currLevel + 1;
12042 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
12043 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
12045 leftChild->offset = currNode->offset;
12046 leftChild->type = Node::TYPE_FREE;
12047 leftChild->parent = currNode;
12048 leftChild->buddy = rightChild;
12050 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
12051 rightChild->type = Node::TYPE_FREE;
12052 rightChild->parent = currNode;
12053 rightChild->buddy = leftChild;
12056 currNode->type = Node::TYPE_SPLIT;
12057 currNode->split.leftChild = leftChild;
12060 AddToFreeListFront(childrenLevel, rightChild);
12061 AddToFreeListFront(childrenLevel, leftChild);
12066 currNode = m_FreeList[currLevel].front;
12075 VMA_ASSERT(currLevel == targetLevel &&
12076 currNode != VMA_NULL &&
12077 currNode->type == Node::TYPE_FREE);
12078 RemoveFromFreeList(currLevel, currNode);
12081 currNode->type = Node::TYPE_ALLOCATION;
12082 currNode->allocation.alloc = hAllocation;
12084 ++m_AllocationCount;
12086 m_SumFreeSize -= allocSize;
12089 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
12091 if(node->type == Node::TYPE_SPLIT)
12093 DeleteNode(node->split.leftChild->buddy);
12094 DeleteNode(node->split.leftChild);
12097 vma_delete(GetAllocationCallbacks(), node);
12100 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const
12102 VMA_VALIDATE(level < m_LevelCount);
12103 VMA_VALIDATE(curr->parent == parent);
12104 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
12105 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
12108 case Node::TYPE_FREE:
12110 ctx.calculatedSumFreeSize += levelNodeSize;
12111 ++ctx.calculatedFreeCount;
12113 case Node::TYPE_ALLOCATION:
12114 ++ctx.calculatedAllocationCount;
12115 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
12116 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
12118 case Node::TYPE_SPLIT:
12120 const uint32_t childrenLevel = level + 1;
12121 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
12122 const Node*
const leftChild = curr->split.leftChild;
12123 VMA_VALIDATE(leftChild != VMA_NULL);
12124 VMA_VALIDATE(leftChild->offset == curr->offset);
12125 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
12127 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
12129 const Node*
const rightChild = leftChild->buddy;
12130 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
12131 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
12133 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
12144 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const
12147 uint32_t level = 0;
12148 VkDeviceSize currLevelNodeSize = m_UsableSize;
12149 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
12150 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
12153 currLevelNodeSize = nextLevelNodeSize;
12154 nextLevelNodeSize = currLevelNodeSize >> 1;
12159 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
12162 Node* node = m_Root;
12163 VkDeviceSize nodeOffset = 0;
12164 uint32_t level = 0;
12165 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
12166 while(node->type == Node::TYPE_SPLIT)
12168 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
12169 if(offset < nodeOffset + nextLevelSize)
12171 node = node->split.leftChild;
12175 node = node->split.leftChild->buddy;
12176 nodeOffset += nextLevelSize;
12179 levelNodeSize = nextLevelSize;
12182 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
12183 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
12186 --m_AllocationCount;
12187 m_SumFreeSize += alloc->GetSize();
12189 node->type = Node::TYPE_FREE;
12192 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
12194 RemoveFromFreeList(level, node->buddy);
12195 Node*
const parent = node->parent;
12197 vma_delete(GetAllocationCallbacks(), node->buddy);
12198 vma_delete(GetAllocationCallbacks(), node);
12199 parent->type = Node::TYPE_FREE;
12207 AddToFreeListFront(level, node);
12210 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const
12214 case Node::TYPE_FREE:
12220 case Node::TYPE_ALLOCATION:
12222 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
12228 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
12229 if(unusedRangeSize > 0)
12238 case Node::TYPE_SPLIT:
12240 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
12241 const Node*
const leftChild = node->split.leftChild;
12242 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
12243 const Node*
const rightChild = leftChild->buddy;
12244 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
12252 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
12254 VMA_ASSERT(node->type == Node::TYPE_FREE);
12257 Node*
const frontNode = m_FreeList[level].front;
12258 if(frontNode == VMA_NULL)
12260 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
12261 node->free.prev = node->free.next = VMA_NULL;
12262 m_FreeList[level].front = m_FreeList[level].back = node;
12266 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
12267 node->free.prev = VMA_NULL;
12268 node->free.next = frontNode;
12269 frontNode->free.prev = node;
12270 m_FreeList[level].front = node;
12274 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
12276 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
12279 if(node->free.prev == VMA_NULL)
12281 VMA_ASSERT(m_FreeList[level].front == node);
12282 m_FreeList[level].front = node->free.next;
12286 Node*
const prevFreeNode = node->free.prev;
12287 VMA_ASSERT(prevFreeNode->free.next == node);
12288 prevFreeNode->free.next = node->free.next;
12292 if(node->free.next == VMA_NULL)
12294 VMA_ASSERT(m_FreeList[level].back == node);
12295 m_FreeList[level].back = node->free.prev;
12299 Node*
const nextFreeNode = node->free.next;
12300 VMA_ASSERT(nextFreeNode->free.prev == node);
12301 nextFreeNode->free.prev = node->free.prev;
#if VMA_STATS_STRING_ENABLED

// Recursively writes one buddy-tree node to the JSON detailed map.
// Fix: restored the `switch(node->type)`, `break;` statements and the
// `default: VMA_ASSERT(0);` case dropped by the extraction.
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            // Internal fragmentation inside this node is reported as unused.
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
12342 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
12343 m_pMetadata(VMA_NULL),
12344 m_MemoryTypeIndex(UINT32_MAX),
12346 m_hMemory(VK_NULL_HANDLE),
12348 m_pMappedData(VMA_NULL)
12352 void VmaDeviceMemoryBlock::Init(
12355 uint32_t newMemoryTypeIndex,
12356 VkDeviceMemory newMemory,
12357 VkDeviceSize newSize,
12359 uint32_t algorithm)
12361 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
12363 m_hParentPool = hParentPool;
12364 m_MemoryTypeIndex = newMemoryTypeIndex;
12366 m_hMemory = newMemory;
12371 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
12374 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
12380 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
12382 m_pMetadata->Init(newSize);
12385 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
12389 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
12391 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
12392 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
12393 m_hMemory = VK_NULL_HANDLE;
12395 vma_delete(allocator, m_pMetadata);
12396 m_pMetadata = VMA_NULL;
12399 bool VmaDeviceMemoryBlock::Validate()
const
12401 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
12402 (m_pMetadata->GetSize() != 0));
12404 return m_pMetadata->Validate();
12407 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
12409 void* pData =
nullptr;
12410 VkResult res = Map(hAllocator, 1, &pData);
12411 if(res != VK_SUCCESS)
12416 res = m_pMetadata->CheckCorruption(pData);
12418 Unmap(hAllocator, 1);
12423 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
12430 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12431 if(m_MapCount != 0)
12433 m_MapCount += count;
12434 VMA_ASSERT(m_pMappedData != VMA_NULL);
12435 if(ppData != VMA_NULL)
12437 *ppData = m_pMappedData;
12443 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
12444 hAllocator->m_hDevice,
12450 if(result == VK_SUCCESS)
12452 if(ppData != VMA_NULL)
12454 *ppData = m_pMappedData;
12456 m_MapCount = count;
12462 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
12469 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12470 if(m_MapCount >= count)
12472 m_MapCount -= count;
12473 if(m_MapCount == 0)
12475 m_pMappedData = VMA_NULL;
12476 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
12481 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
12485 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12487 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12488 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12491 VkResult res = Map(hAllocator, 1, &pData);
12492 if(res != VK_SUCCESS)
12497 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
12498 VmaWriteMagicValue(pData, allocOffset + allocSize);
12500 Unmap(hAllocator, 1);
12505 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12507 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12508 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12511 VkResult res = Map(hAllocator, 1, &pData);
12512 if(res != VK_SUCCESS)
12517 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
12519 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
12521 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
12523 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
12526 Unmap(hAllocator, 1);
12531 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
12534 VkDeviceSize allocationLocalOffset,
12538 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12539 hAllocation->GetBlock() ==
this);
12540 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12541 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12542 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12544 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12545 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
12548 VkResult VmaDeviceMemoryBlock::BindImageMemory(
12551 VkDeviceSize allocationLocalOffset,
12555 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12556 hAllocation->GetBlock() ==
this);
12557 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12558 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12559 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12561 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12562 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
12567 memset(&outInfo, 0,
sizeof(outInfo));
12586 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
12594 VmaPool_T::VmaPool_T(
12597 VkDeviceSize preferredBlockSize) :
12601 createInfo.memoryTypeIndex,
12602 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
12603 createInfo.minBlockCount,
12604 createInfo.maxBlockCount,
12606 createInfo.frameInUseCount,
12607 createInfo.blockSize != 0,
12609 createInfo.priority),
12615 VmaPool_T::~VmaPool_T()
12619 void VmaPool_T::SetName(
const char* pName)
12621 const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
12622 VmaFreeString(allocs, m_Name);
12624 if(pName != VMA_NULL)
12626 m_Name = VmaCreateStringCopy(allocs, pName);
12634 #if VMA_STATS_STRING_ENABLED
12636 #endif // #if VMA_STATS_STRING_ENABLED
12638 VmaBlockVector::VmaBlockVector(
12641 uint32_t memoryTypeIndex,
12642 VkDeviceSize preferredBlockSize,
12643 size_t minBlockCount,
12644 size_t maxBlockCount,
12645 VkDeviceSize bufferImageGranularity,
12646 uint32_t frameInUseCount,
12647 bool explicitBlockSize,
12648 uint32_t algorithm,
12650 m_hAllocator(hAllocator),
12651 m_hParentPool(hParentPool),
12652 m_MemoryTypeIndex(memoryTypeIndex),
12653 m_PreferredBlockSize(preferredBlockSize),
12654 m_MinBlockCount(minBlockCount),
12655 m_MaxBlockCount(maxBlockCount),
12656 m_BufferImageGranularity(bufferImageGranularity),
12657 m_FrameInUseCount(frameInUseCount),
12658 m_ExplicitBlockSize(explicitBlockSize),
12659 m_Algorithm(algorithm),
12660 m_Priority(priority),
12661 m_HasEmptyBlock(false),
12662 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
12667 VmaBlockVector::~VmaBlockVector()
12669 for(
size_t i = m_Blocks.size(); i--; )
12671 m_Blocks[i]->Destroy(m_hAllocator);
12672 vma_delete(m_hAllocator, m_Blocks[i]);
12676 VkResult VmaBlockVector::CreateMinBlocks()
12678 for(
size_t i = 0; i < m_MinBlockCount; ++i)
12680 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
12681 if(res != VK_SUCCESS)
12689 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
12691 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12693 const size_t blockCount = m_Blocks.size();
12702 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12704 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12705 VMA_ASSERT(pBlock);
12706 VMA_HEAVY_ASSERT(pBlock->Validate());
12707 pBlock->m_pMetadata->AddPoolStats(*pStats);
12711 bool VmaBlockVector::IsEmpty()
12713 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12714 return m_Blocks.empty();
12717 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const
12719 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12720 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
12721 (VMA_DEBUG_MARGIN > 0) &&
12723 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Maximum number of retry rounds when allocating with can-make-other-lost
// semantics; used by VmaBlockVector (see the canMakeOtherLost loop below).
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
12728 VkResult VmaBlockVector::Allocate(
12729 uint32_t currentFrameIndex,
12731 VkDeviceSize alignment,
12733 VmaSuballocationType suballocType,
12734 size_t allocationCount,
12738 VkResult res = VK_SUCCESS;
12740 if(IsCorruptionDetectionEnabled())
12742 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
12743 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
12747 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12748 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12750 res = AllocatePage(
12756 pAllocations + allocIndex);
12757 if(res != VK_SUCCESS)
12764 if(res != VK_SUCCESS)
12767 while(allocIndex--)
12769 Free(pAllocations[allocIndex]);
12771 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single allocation ("page") from this block vector.
// NOTE(review): this region was damaged during extraction — original file line
// numbers ("12777", ...) are fused into the code and many lines (braces,
// argument lists, else-branches) are missing. Code is left byte-identical;
// comments only. Compare against upstream vk_mem_alloc.h before trusting it.
//
// Visible strategy, in order:
//   1. Compute heap budget and decide whether a new block may be created.
//   2. Try the last block, then all blocks forward, then backward.
//   3. Try to create a new block (progressively halving its size on failure).
//   4. If canMakeOtherLost, retry up to VMA_ALLOCATION_TRY_COUNT times,
//      choosing the cheapest "make allocations lost" request.
12777 VkResult VmaBlockVector::AllocatePage(
12778 uint32_t currentFrameIndex,
12780 VkDeviceSize alignment,
12782 VmaSuballocationType suballocType,
// --- Budget query: free memory in this block vector's heap. ---
12790 VkDeviceSize freeMemory;
12792 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
12794 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
// Default pools may fall back to a dedicated allocation; custom pools may not.
12798 const bool canFallbackToDedicated = !IsCustomPool();
12799 const bool canCreateNewBlock =
12801 (m_Blocks.size() < m_MaxBlockCount) &&
12802 (freeMemory >= size || !canFallbackToDedicated);
12809 canMakeOtherLost =
false;
// Upper-address allocation is only supported by the linear algorithm
// (condition truncated here — TODO confirm against upstream).
12813 if(isUpperAddress &&
12816 return VK_ERROR_FEATURE_NOT_PRESENT;
12830 return VK_ERROR_FEATURE_NOT_PRESENT;
// Early reject: request (plus debug margins) can never fit in one block.
12834 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
12836 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12844 if(!canMakeOtherLost || canCreateNewBlock)
// 1. Search existing allocations, starting with the last (most free) block.
12853 if(!m_Blocks.empty())
12855 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
12856 VMA_ASSERT(pCurrBlock);
12857 VkResult res = AllocateFromBlock(
12867 if(res == VK_SUCCESS)
12869 VMA_DEBUG_LOG(
" Returned from last block #%u", pCurrBlock->GetId());
// Forward scan over all existing blocks.
12879 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
12881 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12882 VMA_ASSERT(pCurrBlock);
12883 VkResult res = AllocateFromBlock(
12893 if(res == VK_SUCCESS)
12895 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
// Backward scan (presumably the "prefer larger blocks first" strategy —
// branch selection lines are missing here).
12903 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12905 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12906 VMA_ASSERT(pCurrBlock);
12907 VkResult res = AllocateFromBlock(
12917 if(res == VK_SUCCESS)
12919 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
// 2. Try to create a new block.
12927 if(canCreateNewBlock)
12930 VkDeviceSize newBlockSize = m_PreferredBlockSize;
12931 uint32_t newBlockSizeShift = 0;
12932 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// With implicit block size, start smaller than preferred if existing blocks
// are small: halve while still larger than the largest block and >= 2*size.
12934 if(!m_ExplicitBlockSize)
12937 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
12938 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
12940 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
12941 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
12943 newBlockSize = smallerNewBlockSize;
12944 ++newBlockSizeShift;
12953 size_t newBlockIndex = 0;
// Respect the budget unless dedicated fallback is impossible.
12954 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
12955 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
// On failure, keep halving the block size while it still fits the request.
12957 if(!m_ExplicitBlockSize)
12959 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
12961 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
12962 if(smallerNewBlockSize >= size)
12964 newBlockSize = smallerNewBlockSize;
12965 ++newBlockSizeShift;
12966 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
12967 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
12976 if(res == VK_SUCCESS)
12978 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
12979 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
12981 res = AllocateFromBlock(
12991 if(res == VK_SUCCESS)
12993 VMA_DEBUG_LOG(
" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
12999 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Last resort: make other allocations lost and reuse their space.
13006 if(canMakeOtherLost)
13008 uint32_t tryIndex = 0;
13009 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
13011 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
13012 VmaAllocationRequest bestRequest = {};
13013 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan: find the cheapest allocation request (fewest bytes lost).
13019 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13021 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13022 VMA_ASSERT(pCurrBlock);
13023 VmaAllocationRequest currRequest = {};
13024 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13027 m_BufferImageGranularity,
13036 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13037 if(pBestRequestBlock == VMA_NULL ||
13038 currRequestCost < bestRequestCost)
13040 pBestRequestBlock = pCurrBlock;
13041 bestRequest = currRequest;
13042 bestRequestCost = currRequestCost;
// Cost 0 means nothing would be lost — cannot do better, stop searching.
13044 if(bestRequestCost == 0)
// Backward scan variant (strategy-dependent; selection lines missing).
13055 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13057 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13058 VMA_ASSERT(pCurrBlock);
13059 VmaAllocationRequest currRequest = {};
13060 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13063 m_BufferImageGranularity,
13072 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13073 if(pBestRequestBlock == VMA_NULL ||
13074 currRequestCost < bestRequestCost ||
13077 pBestRequestBlock = pCurrBlock;
13078 bestRequest = currRequest;
13079 bestRequestCost = currRequestCost;
13081 if(bestRequestCost == 0 ||
13091 if(pBestRequestBlock != VMA_NULL)
// Persistently mapped allocations require the block to be mapped.
13095 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
13096 if(res != VK_SUCCESS)
// Only commit if the victims can still be made lost at this frame index.
13102 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
13108 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13109 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
13110 UpdateHasEmptyBlock();
13111 (*pAllocation)->InitBlockAllocation(
13113 bestRequest.offset,
13120 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
13121 VMA_DEBUG_LOG(
" Returned from existing block");
13122 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
13123 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13124 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13126 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
13128 if(IsCorruptionDetectionEnabled())
13130 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
13131 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// All retries exhausted — making allocations lost kept failing.
13146 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
13148 return VK_ERROR_TOO_MANY_OBJECTS;
13152 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees hAllocation back into its owning block, then possibly destroys an
// empty block. At most one empty block is retained; a second empty block (or
// any empty block while over budget) is deleted, provided m_MinBlockCount
// still holds.
// NOTE(review): extraction-damaged region — fused line numbers and missing
// braces. Code left byte-identical; comments only.
13155 void VmaBlockVector::Free(
// Block to destroy is recorded under the lock but destroyed after releasing
// it (destruction calls vkFreeMemory and must not hold the mutex).
13158 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
13160 bool budgetExceeded =
false;
13162 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13164 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
13165 budgetExceeded = heapBudget.
usage >= heapBudget.
budget;
// Scope of the write lock over m_Blocks.
13170 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13172 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Check the guard bytes around the allocation before releasing it.
13174 if(IsCorruptionDetectionEnabled())
13176 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
13177 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Balance the implicit Map() done for persistently mapped allocations.
13180 if(hAllocation->IsPersistentMap())
13182 pBlock->Unmap(m_hAllocator, 1);
13185 pBlock->m_pMetadata->Free(hAllocation);
13186 VMA_HEAVY_ASSERT(pBlock->Validate());
13188 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
13190 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
// The freed block became empty: delete it if another empty block already
// exists, or if the heap is over budget.
13192 if(pBlock->m_pMetadata->IsEmpty())
13195 if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
13197 pBlockToDelete = pBlock;
// Block not empty, but an empty block exists elsewhere: keep at most one
// empty block — drop the trailing one if it is empty.
13204 else if(m_HasEmptyBlock && canDeleteBlock)
13206 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
13207 if(pLastBlock->m_pMetadata->IsEmpty())
13209 pBlockToDelete = pLastBlock;
13210 m_Blocks.pop_back();
13214 UpdateHasEmptyBlock();
13215 IncrementallySortBlocks();
// Destruction happens outside the lock (see note above).
13220 if(pBlockToDelete != VMA_NULL)
13222 VMA_DEBUG_LOG(
" Deleted empty block");
13223 pBlockToDelete->Destroy(m_hAllocator);
13224 vma_delete(m_hAllocator, pBlockToDelete);
13228 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const
13230 VkDeviceSize result = 0;
13231 for(
size_t i = m_Blocks.size(); i--; )
13233 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
13234 if(result >= m_PreferredBlockSize)
13242 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
13244 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13246 if(m_Blocks[blockIndex] == pBlock)
13248 VmaVectorRemove(m_Blocks, blockIndex);
13255 void VmaBlockVector::IncrementallySortBlocks()
13260 for(
size_t i = 1; i < m_Blocks.size(); ++i)
13262 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
13264 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts a single allocation from one specific block. Returns VK_SUCCESS
// and fills *pAllocation on success, VK_ERROR_OUT_OF_DEVICE_MEMORY if the
// block cannot satisfy the request.
// NOTE(review): extraction-damaged region — several parameter and argument
// lines are missing. Code left byte-identical; comments only.
13271 VkResult VmaBlockVector::AllocateFromBlock(
13272 VmaDeviceMemoryBlock* pBlock,
13273 uint32_t currentFrameIndex,
13275 VkDeviceSize alignment,
13278 VmaSuballocationType suballocType,
13287 VmaAllocationRequest currRequest = {};
// Ask the block's metadata whether the request fits (remaining arguments of
// this call were lost during extraction).
13288 if(pBlock->m_pMetadata->CreateAllocationRequest(
13291 m_BufferImageGranularity,
// This path never makes other allocations lost.
13301 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently mapped allocations require the whole block to be mapped.
13305 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
13306 if(res != VK_SUCCESS)
13312 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13313 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
13314 UpdateHasEmptyBlock();
13315 (*pAllocation)->InitBlockAllocation(
13317 currRequest.offset,
13324 VMA_HEAVY_ASSERT(pBlock->Validate());
13325 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
// Account the new allocation against the heap budget.
13326 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13327 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13329 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Write guard bytes around the new allocation for corruption detection.
13331 if(IsCorruptionDetectionEnabled())
13333 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
13334 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
13338 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of blockSize for this memory type, wraps it
// in a VmaDeviceMemoryBlock, appends it to m_Blocks and optionally returns
// its index via pNewBlockIndex.
// NOTE(review): extraction-damaged — the error-return after
// AllocateVulkanMemory and the pBlock->Init(...) argument list are missing.
// Code left byte-identical; comments only.
13341 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
13343 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
13344 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
13345 allocInfo.allocationSize = blockSize;
// Chain VkMemoryAllocateFlagsInfoKHR when bufferDeviceAddress is in use so
// memory from this block can back buffers with device addresses.
13347 #if VMA_BUFFER_DEVICE_ADDRESS
13349 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
13350 if(m_hAllocator->m_UseKhrBufferDeviceAddress)
13352 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
13353 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
13355 #endif // #if VMA_BUFFER_DEVICE_ADDRESS
// Chain VkMemoryPriorityAllocateInfoEXT when VK_EXT_memory_priority is used.
13357 #if VMA_MEMORY_PRIORITY
13358 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
13359 if(m_hAllocator->m_UseExtMemoryPriority)
13361 priorityInfo.priority = m_Priority;
13362 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
13364 #endif // #if VMA_MEMORY_PRIORITY
13366 VkDeviceMemory mem = VK_NULL_HANDLE;
13367 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Wrap the raw memory; the Init(...) call arguments were lost in extraction.
13376 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
13382 allocInfo.allocationSize,
13386 m_Blocks.push_back(pBlock);
13387 if(pNewBlockIndex != VMA_NULL)
13389 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU: maps every block touched by a
// move, memmove's the bytes, performs cache invalidate/flush for
// non-coherent memory, then unmaps blocks that were mapped only for this.
// NOTE(review): extraction-damaged region (fused line numbers, missing
// braces/else lines). Code left byte-identical; comments only.
13395 void VmaBlockVector::ApplyDefragmentationMovesCpu(
13396 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13397 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
13399 const size_t blockCount = m_Blocks.size();
13400 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
// Per-block bookkeeping flags for this operation.
13404 BLOCK_FLAG_USED = 0x00000001,
13405 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
13413 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
13414 blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
13415 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Pass 1: mark every block that participates in at least one move.
13418 const size_t moveCount = moves.size();
13419 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13421 const VmaDefragmentationMove& move = moves[moveIndex];
13422 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
13423 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
13426 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: ensure each used block is mapped; remember which blocks were
// mapped here so they can be unmapped afterwards.
13429 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13431 BlockInfo& currBlockInfo = blockInfo[blockIndex];
13432 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13433 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
13435 currBlockInfo.pMappedData = pBlock->GetMappedData();
13437 if(currBlockInfo.pMappedData == VMA_NULL)
13439 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
13440 if(pDefragCtx->res == VK_SUCCESS)
13442 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Pass 3: perform the actual copies, with invalidate-before/flush-after on
// non-coherent memory, clamped to nonCoherentAtomSize alignment.
13449 if(pDefragCtx->res == VK_SUCCESS)
13451 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13452 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13454 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13456 const VmaDefragmentationMove& move = moves[moveIndex];
13458 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
13459 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
13461 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate the source range so the CPU reads fresh data.
13466 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
13467 memRange.memory = pSrcBlock->GetDeviceMemory();
13468 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
13469 memRange.size = VMA_MIN(
13470 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
13471 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
13472 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// The copy itself (the memmove/memcpy call line was lost in extraction;
// upstream uses memmove because ranges may overlap within one block).
13477 reinterpret_cast<char*
>(dstBlockInfo.pMappedData) + move.dstOffset,
13478 reinterpret_cast<char*
>(srcBlockInfo.pMappedData) + move.srcOffset,
13479 static_cast<size_t>(move.size));
// Re-write guard bytes around the moved allocation at its new offset.
13481 if(IsCorruptionDetectionEnabled())
13483 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
13484 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush the destination range so the GPU sees the written data.
13490 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
13491 memRange.memory = pDstBlock->GetDeviceMemory();
13492 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
13493 memRange.size = VMA_MIN(
13494 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
13495 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
13496 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Pass 4 (reverse order): unmap blocks that were mapped only for this pass.
13503 for(
size_t blockIndex = blockCount; blockIndex--; )
13505 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
13506 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
13508 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13509 pBlock->Unmap(m_hAllocator, 1);
// Records defragmentation moves into a command buffer as vkCmdCopyBuffer
// commands: a temporary whole-block VkBuffer is created and bound for every
// block involved in a move, then one buffer-copy region per move is recorded.
// The buffers are destroyed later in DefragmentationEnd.
// NOTE(review): extraction-damaged region; also note the mojibake '®ion' on
// the vkCmdCopyBuffer call — almost certainly '&region' corrupted by an
// HTML-entity pass. Code left byte-identical; comments only.
13514 void VmaBlockVector::ApplyDefragmentationMovesGpu(
13515 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13516 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13517 VkCommandBuffer commandBuffer)
13519 const size_t blockCount = m_Blocks.size();
13521 pDefragCtx->blockContexts.resize(blockCount);
13522 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Pass 1: mark every block that participates in at least one move.
13525 const size_t moveCount = moves.size();
13526 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13528 const VmaDefragmentationMove& move = moves[moveIndex];
13533 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13534 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13538 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: create and bind a block-sized transfer buffer over each used block.
13542 VkBufferCreateInfo bufCreateInfo;
13543 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
13545 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13547 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
13548 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13549 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
13551 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
13552 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
13553 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
13554 if(pDefragCtx->res == VK_SUCCESS)
13556 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
13557 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Pass 3: record one copy region per move into the command buffer.
13564 if(pDefragCtx->res == VK_SUCCESS)
13566 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13568 const VmaDefragmentationMove& move = moves[moveIndex];
13570 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
13571 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
13573 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
13575 VkBufferCopy region = {
// NOTE(review): '®ion' below should read '&region' — mojibake, see header.
13579 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
13580 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
// Copies were recorded but not yet executed — the context is "not ready"
// until the command buffer completes on the GPU.
13585 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
13587 pDefragCtx->res = VK_NOT_READY;
// NOTE(review): this is the body of VmaBlockVector::FreeEmptyBlocks(
// VmaDefragmentationStats* pDefragmentationStats) — the function's signature
// line was lost during extraction (original lines ~13585-13591 are missing).
// It destroys all empty blocks above m_MinBlockCount, crediting their size to
// pDefragmentationStats->bytesFreed, then recomputes m_HasEmptyBlock.
// Code left byte-identical; comments only.
13593 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13595 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13596 if(pBlock->m_pMetadata->IsEmpty())
// Never drop below the pool's configured minimum block count.
13598 if(m_Blocks.size() > m_MinBlockCount)
13600 if(pDefragmentationStats != VMA_NULL)
13603 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
13606 VmaVectorRemove(m_Blocks, blockIndex)
13607 pBlock->Destroy(m_hAllocator);
13608 vma_delete(m_hAllocator, pBlock);
13616 UpdateHasEmptyBlock();
13619 void VmaBlockVector::UpdateHasEmptyBlock()
13621 m_HasEmptyBlock =
false;
13622 for(
size_t index = 0, count = m_Blocks.size(); index < count; ++index)
13624 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[index];
13625 if(pBlock->m_pMetadata->IsEmpty())
13627 m_HasEmptyBlock =
true;
#if VMA_STATS_STRING_ENABLED

// Serializes this block vector's configuration and per-block metadata into
// JSON for VMA's statistics string. The custom-pool branch writes pool name /
// block size / block count limits; the default-pool branch (selection lines
// lost in extraction) writes only PreferredBlockSize.
// NOTE(review): extraction-damaged region. Code left byte-identical.
13635 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
// Read lock: printing must see a consistent m_Blocks.
13637 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13639 json.BeginObject();
// Custom-pool branch: emit the pool's name when set.
13643 const char* poolName = m_hParentPool->GetName();
13644 if(poolName != VMA_NULL && poolName[0] !=
'\0')
13646 json.WriteString(
"Name");
13647 json.WriteString(poolName);
13650 json.WriteString(
"MemoryTypeIndex");
13651 json.WriteNumber(m_MemoryTypeIndex);
13653 json.WriteString(
"BlockSize");
13654 json.WriteNumber(m_PreferredBlockSize);
13656 json.WriteString(
"BlockCount");
13657 json.BeginObject(
true);
13658 if(m_MinBlockCount > 0)
13660 json.WriteString(
"Min");
13661 json.WriteNumber((uint64_t)m_MinBlockCount);
13663 if(m_MaxBlockCount < SIZE_MAX)
13665 json.WriteString(
"Max");
13666 json.WriteNumber((uint64_t)m_MaxBlockCount);
13668 json.WriteString(
"Cur");
13669 json.WriteNumber((uint64_t)m_Blocks.size());
13672 if(m_FrameInUseCount > 0)
13674 json.WriteString(
"FrameInUseCount");
13675 json.WriteNumber(m_FrameInUseCount);
13678 if(m_Algorithm != 0)
13680 json.WriteString(
"Algorithm");
13681 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default-pool branch.
13686 json.WriteString(
"PreferredBlockSize");
13687 json.WriteNumber(m_PreferredBlockSize);
// Common tail: per-block detailed metadata, keyed by block id.
13690 json.WriteString(
"Blocks");
13691 json.BeginObject();
13692 for(
size_t i = 0; i < m_Blocks.size(); ++i)
13694 json.BeginString();
13695 json.ContinueString(m_Blocks[i]->GetId());
13698 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);

#endif // #if VMA_STATS_STRING_ENABLED
// Runs the analysis phase of defragmentation for this block vector: chooses
// CPU vs GPU strategy, acquires the mutex (optionally non-blocking for
// incremental defragmentation), computes the move list within the byte /
// allocation budgets, and — for non-incremental mode — applies the moves.
// NOTE(review): extraction-damaged region (missing parameters, braces and
// some statements). Code left byte-identical; comments only.
13707 void VmaBlockVector::Defragment(
13708 class VmaBlockVectorDefragmentationContext* pCtx,
13710 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
13711 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
13712 VkCommandBuffer commandBuffer)
13714 pCtx->res = VK_SUCCESS;
13716 const VkMemoryPropertyFlags memPropFlags =
13717 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
13718 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
// CPU path requires host-visible memory and a nonzero CPU budget; GPU path
// requires a command buffer, a nonzero GPU budget, no corruption detection,
// and this memory type enabled in the GPU-defragmentation mask.
13720 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
13722 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
13723 !IsCorruptionDetectionEnabled() &&
13724 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
13727 if(canDefragmentOnCpu || canDefragmentOnGpu)
13729 bool defragmentOnGpu;
// If only one path is available, take it.
13731 if(canDefragmentOnGpu != canDefragmentOnCpu)
13733 defragmentOnGpu = canDefragmentOnGpu;
// Otherwise prefer GPU for device-local memory or integrated GPUs.
13738 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
13739 m_hAllocator->IsIntegratedGpu();
// GPU copies must not overlap; CPU path uses memmove and tolerates overlap.
13742 bool overlappingMoveSupported = !defragmentOnGpu;
13744 if(m_hAllocator->m_UseMutex)
// Incremental flag branch (selection lines lost): try-lock and bail out
// with VK_ERROR_INITIALIZATION_FAILED rather than block.
13748 if(!m_Mutex.TryLockWrite())
13750 pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
13756 m_Mutex.LockWrite();
13757 pCtx->mutexLocked =
true;
13761 pCtx->Begin(overlappingMoveSupported, flags);
// Budget for the chosen path, then run the algorithm to build the move list.
13765 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
13766 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
13767 pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);
// Deduct what was consumed from the caller's remaining budget.
13770 if(pStats != VMA_NULL)
13772 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
13773 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
13776 VMA_ASSERT(bytesMoved <= maxBytesToMove);
13777 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
13778 if(defragmentOnGpu)
13780 maxGpuBytesToMove -= bytesMoved;
13781 maxGpuAllocationsToMove -= allocationsMoved;
13785 maxCpuBytesToMove -= bytesMoved;
13786 maxCpuAllocationsToMove -= allocationsMoved;
// Incremental mode: release the lock and report "not ready" so the caller
// drives the moves step by step (branch-selection lines lost in extraction).
13792 if(m_hAllocator->m_UseMutex)
13793 m_Mutex.UnlockWrite();
13795 if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
13796 pCtx->res = VK_NOT_READY;
// Non-incremental mode: apply the computed moves immediately.
13801 if(pCtx->res >= VK_SUCCESS)
13803 if(defragmentOnGpu)
13805 ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
13809 ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
// Finalizes defragmentation for this block vector: destroys the temporary
// buffers created by ApplyDefragmentationMovesGpu, frees now-empty blocks on
// success, and releases the mutex if this context locked it.
// NOTE(review): extraction-damaged region. Code left byte-identical.
13815 void VmaBlockVector::DefragmentationEnd(
13816 class VmaBlockVectorDefragmentationContext* pCtx,
// Re-acquire the lock if it is not held (flag-dependent branch; the
// surrounding condition lines were lost in extraction).
13822 VMA_ASSERT(pCtx->mutexLocked ==
false);
13826 m_Mutex.LockWrite();
13827 pCtx->mutexLocked =
true;
13831 if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
// Destroy temporary per-block transfer buffers in reverse order.
13834 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
13836 VmaBlockDefragmentationContext &blockCtx = pCtx->blockContexts[blockIndex];
13837 if(blockCtx.hBuffer)
13839 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
13843 if(pCtx->res >= VK_SUCCESS)
13845 FreeEmptyBlocks(pStats);
// Release the lock if this context is the one holding it.
13849 if(pCtx->mutexLocked)
13851 VMA_ASSERT(m_hAllocator->m_UseMutex);
13852 m_Mutex.UnlockWrite();
// Incremental defragmentation: hands out up to maxMoves pending moves to the
// caller (writing destination memory/offset into pMove entries), advances the
// processed counter, and returns the number of moves produced.
// NOTE(review): extraction-damaged region (pMove advance and return lines
// missing). Code left byte-identical; comments only.
13856 uint32_t VmaBlockVector::ProcessDefragmentations(
13857 class VmaBlockVectorDefragmentationContext *pCtx,
13860 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
// Clamp to the number of not-yet-processed moves remaining in the context.
13862 const uint32_t moveCount = VMA_MIN(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);
13864 for(uint32_t i = 0; i < moveCount; ++ i)
13866 VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];
// Expose the destination so the caller can perform/record the copy itself.
13869 pMove->
memory = move.pDstBlock->GetDeviceMemory();
13870 pMove->
offset = move.dstOffset;
13875 pCtx->defragmentationMovesProcessed += moveCount;
// Incremental defragmentation: commits all moves the caller has processed
// since the last commit — frees the source suballocation and rebinds the
// allocation to its destination block/offset — then frees empty blocks.
// NOTE(review): extraction-damaged region. Code left byte-identical.
13880 void VmaBlockVector::CommitDefragmentations(
13881 class VmaBlockVectorDefragmentationContext *pCtx,
13884 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
// Only the window [committed, processed) is committed here.
13886 for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
13888 const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];
13890 move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
13891 move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
13894 pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
13895 FreeEmptyBlocks(pStats);
13898 size_t VmaBlockVector::CalcAllocationCount()
const
13901 for(
size_t i = 0; i < m_Blocks.size(); ++i)
13903 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Returns whether any block might contain adjacent buffer/image
// suballocations that conflict under bufferImageGranularity. Trivially false
// when granularity is 1. Only valid for the generic (default) algorithm —
// hence the VMA_ASSERT(m_Algorithm == 0) and the cast below.
// NOTE(review): extraction-damaged — the early 'return false', braces, the
// 'return true' inside the loop and the final 'return false' are missing.
// Code left byte-identical; comments only. lastSuballocType is carried
// across blocks so the per-block check can consider the previous type.
13910 if(m_BufferImageGranularity == 1)
13914 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
13915 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
13917 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
13918 VMA_ASSERT(m_Algorithm == 0);
13919 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
13920 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
13928 void VmaBlockVector::MakePoolAllocationsLost(
13929 uint32_t currentFrameIndex,
13930 size_t* pLostAllocationCount)
13932 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13933 size_t lostAllocationCount = 0;
13934 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13936 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
13937 VMA_ASSERT(pBlock);
13938 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
13940 if(pLostAllocationCount != VMA_NULL)
13942 *pLostAllocationCount = lostAllocationCount;
13946 VkResult VmaBlockVector::CheckCorruption()
13948 if(!IsCorruptionDetectionEnabled())
13950 return VK_ERROR_FEATURE_NOT_PRESENT;
13953 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13954 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13956 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
13957 VMA_ASSERT(pBlock);
13958 VkResult res = pBlock->CheckCorruption(m_hAllocator);
13959 if(res != VK_SUCCESS)
13967 void VmaBlockVector::AddStats(
VmaStats* pStats)
13969 const uint32_t memTypeIndex = m_MemoryTypeIndex;
13970 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
13972 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13974 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13976 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
13977 VMA_ASSERT(pBlock);
13978 VMA_HEAVY_ASSERT(pBlock->Validate());
13980 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
13981 VmaAddStatInfo(pStats->
total, allocationStatInfo);
13982 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
13983 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor of the generic (slower, thorough) defragmentation algorithm:
// builds one BlockInfo per block of the target block vector and sorts the
// list by block pointer so AddAllocation can binary-search it.
// NOTE(review): extraction-damaged — the first parameter line (presumably
// 'VmaAllocator hAllocator,') and the m_BytesMoved initializer line are
// missing. Code left byte-identical; comments only.
13990 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
13992 VmaBlockVector* pBlockVector,
13993 uint32_t currentFrameIndex,
13994 bool overlappingMoveSupported) :
13995 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
13996 m_AllocationCount(0),
13997 m_AllAllocations(false),
13999 m_AllocationsMoved(0),
14000 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// One BlockInfo per source block, remembering its original index.
14003 const size_t blockCount = m_pBlockVector->m_Blocks.size();
14004 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14006 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
14007 pBlockInfo->m_OriginalBlockIndex = blockIndex;
14008 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
14009 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer value for binary search in AddAllocation.
14013 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
14016 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
14018 for(
size_t i = m_Blocks.size(); i--; )
14020 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a candidate for defragmentation. Lost
// allocations are skipped. The owning block is found by binary search in the
// pointer-sorted m_Blocks list; pChanged, if given, will be set to VK_TRUE
// later when the allocation is actually moved.
// NOTE(review): extraction-damaged — the else-branch (upstream asserts when
// the block is not found) is missing. Code left byte-identical.
14024 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
14027 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
14029 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
14030 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
14031 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
14033 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
14034 (*it)->m_Allocations.push_back(allocInfo);
14041 ++m_AllocationCount;
// One round of the generic defragmentation algorithm: repeatedly takes the
// last allocation of the last block and tries to re-place it into an
// earlier block (or earlier in the same block, per MoveMakesSense), emitting
// a VmaDefragmentationMove per success, until the byte/allocation budget is
// hit or no further sensible move exists.
// NOTE(review): extraction-damaged region — many control-flow lines (breaks,
// else-branches, loop epilogue) are missing. Code left byte-identical.
14045 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
14046 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14047 VkDeviceSize maxBytesToMove,
14048 uint32_t maxAllocationsToMove,
14049 bool freeOldAllocations)
14051 if(m_Blocks.empty())
// Blocks below this index are not considered as sources.
14064 size_t srcBlockMinIndex = 0;
// Cursor: start at the last allocation of the last block (SIZE_MAX means
// "not yet positioned"; wraps to size-1 below).
14077 size_t srcBlockIndex = m_Blocks.size() - 1;
14078 size_t srcAllocIndex = SIZE_MAX;
// Walk backwards to the nearest block that still has allocations.
14084 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
14086 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
14089 if(srcBlockIndex == srcBlockMinIndex)
14096 srcAllocIndex = SIZE_MAX;
14101 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
14105 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
14106 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
14108 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
14109 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
14110 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
14111 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source.
14114 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
14116 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
14117 VmaAllocationRequest dstAllocRequest;
14118 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
14119 m_CurrentFrameIndex,
14120 m_pBlockVector->GetFrameInUseCount(),
14121 m_pBlockVector->GetBufferImageGranularity(),
14128 &dstAllocRequest) &&
14130 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
14132 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop the round when either budget would be exceeded.
14135 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
14136 (m_BytesMoved + size > maxBytesToMove))
// Record the move.
14141 VmaDefragmentationMove move = {};
14142 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
14143 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
14144 move.srcOffset = srcOffset;
14145 move.dstOffset = dstAllocRequest.offset;
14147 move.hAllocation = allocInfo.m_hAllocation;
14148 move.pSrcBlock = pSrcBlockInfo->m_pBlock;
14149 move.pDstBlock = pDstBlockInfo->m_pBlock;
14151 moves.push_back(move);
// Reserve the destination immediately so later moves see updated metadata.
14153 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
14157 allocInfo.m_hAllocation);
// Non-incremental mode frees the source right away; incremental mode defers
// to CommitDefragmentations.
14159 if(freeOldAllocations)
14161 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
14162 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
14165 if(allocInfo.m_pChanged != VMA_NULL)
14167 *allocInfo.m_pChanged = VK_TRUE;
14170 ++m_AllocationsMoved;
14171 m_BytesMoved += size;
14173 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the cursor to the previous allocation / previous block.
14181 if(srcAllocIndex > 0)
14187 if(srcBlockIndex > 0)
14190 srcAllocIndex = SIZE_MAX;
14200 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const
14203 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14205 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm: gathers allocations (all of them in
// m_AllAllocations mode, walking each block's suballocation list), computes
// per-block movability, sorts allocations by descending offset and blocks by
// destination preference, then runs up to roundCount DefragmentRound passes.
// NOTE(review): extraction-damaged — the DefragmentRound(...) call inside the
// round loop and the final return are missing. Code left byte-identical.
14213 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
14214 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14215 VkDeviceSize maxBytesToMove,
14216 uint32_t maxAllocationsToMove,
// Nothing to do if no allocations were registered and not in "all" mode.
14219 if(!m_AllAllocations && m_AllocationCount == 0)
14224 const size_t blockCount = m_Blocks.size();
14225 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14227 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
// "All allocations" mode: enumerate every non-free suballocation directly
// from the generic metadata (valid only for the default algorithm).
14229 if(m_AllAllocations)
14231 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
14232 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
14233 it != pMetadata->m_Suballocations.end();
14236 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
14238 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
14239 pBlockInfo->m_Allocations.push_back(allocInfo);
14244 pBlockInfo->CalcHasNonMovableAllocations();
// Sources are consumed from the back, so sort descending by offset.
14248 pBlockInfo->SortAllocationsByOffsetDescending();
14254 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Execute up to roundCount rounds (the call line was lost in extraction).
14257 const uint32_t roundCount = 2;
14260 VkResult result = VK_SUCCESS;
14261 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
14269 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
14270 size_t dstBlockIndex, VkDeviceSize dstOffset,
14271 size_t srcBlockIndex, VkDeviceSize srcOffset)
14273 if(dstBlockIndex < srcBlockIndex)
14277 if(dstBlockIndex > srcBlockIndex)
14281 if(dstOffset < srcOffset)
// Constructor of the fast defragmentation algorithm. The fast algorithm
// compacts by sliding allocations toward lower offsets, which is why it
// requires VMA_DEBUG_MARGIN == 0 (guard margins would break the packing).
// NOTE(review): extraction-damaged — the first parameter line (presumably
// 'VmaAllocator hAllocator,') and the m_BytesMoved initializer are missing.
// Code left byte-identical; comments only.
14291 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
14293 VmaBlockVector* pBlockVector,
14294 uint32_t currentFrameIndex,
14295 bool overlappingMoveSupported) :
14296 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14297 m_OverlappingMoveSupported(overlappingMoveSupported),
14298 m_AllocationCount(0),
14299 m_AllAllocations(false),
14301 m_AllocationsMoved(0),
14302 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
14304 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
14308 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Core of the fast defragmentation algorithm: compacts allocations toward the
// front of the block vector in a single pass, recording each relocation in
// `moves`, bounded by maxBytesToMove / maxAllocationsToMove.
// NOTE(review): several interior lines (braces, early returns) are elided from
// this excerpt; comments below describe only what the visible code shows.
14312 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
14313 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14314 VkDeviceSize maxBytesToMove,
14315 uint32_t maxAllocationsToMove,
// Either all allocations were registered in bulk, or counts must agree.
14318 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
14320 const size_t blockCount = m_pBlockVector->GetBlockCount();
// Nothing to do when there are no blocks or the budget is zero.
14321 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
// Strips free suballocations from metadata so only used ones remain.
14326 PreprocessMetadata();
// Sort block indices so blocks with the least free space come first —
// those are the preferred move destinations.
14330 m_BlockInfos.resize(blockCount);
14331 for(
size_t i = 0; i < blockCount; ++i)
14333 m_BlockInfos[i].origBlockIndex = i;
14336 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
14337 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
14338 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Tracks gaps left behind in already-processed blocks for later reuse.
14343 FreeSpaceDatabase freeSpaceDb;
// Current destination cursor: block (in sorted order) and offset within it.
14345 size_t dstBlockInfoIndex = 0;
14346 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14347 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14348 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14349 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
14350 VkDeviceSize dstOffset = 0;
// Walk source blocks in the same sorted order, moving each suballocation.
14353 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
14355 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
14356 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
14357 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
14358 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
14359 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
14361 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
14362 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
14363 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Stop once either budget (count or bytes) would be exceeded.
14364 if(m_AllocationsMoved == maxAllocationsToMove ||
14365 m_BytesMoved + srcAllocSize > maxBytesToMove)
14370 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
14372 VmaDefragmentationMove move = {};
// First preference: reuse a previously registered free gap that fits.
14374 size_t freeSpaceInfoIndex;
14375 VkDeviceSize dstAllocOffset;
14376 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
14377 freeSpaceInfoIndex, dstAllocOffset))
14379 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
14380 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
14381 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Case A: the gap is in the same block — move within the block.
14384 if(freeSpaceInfoIndex == srcBlockInfoIndex)
14386 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
// Detach the suballocation, retarget its offset, and re-insert
// it into the metadata at the new position.
14390 VmaSuballocation suballoc = *srcSuballocIt;
14391 suballoc.offset = dstAllocOffset;
14392 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
14393 m_BytesMoved += srcAllocSize;
14394 ++m_AllocationsMoved;
14396 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14398 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14399 srcSuballocIt = nextSuballocIt;
14401 InsertSuballoc(pFreeSpaceMetadata, suballoc);
// Record the move for the caller (offsets are block-relative).
14403 move.srcBlockIndex = srcOrigBlockIndex;
14404 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14405 move.srcOffset = srcAllocOffset;
14406 move.dstOffset = dstAllocOffset;
14407 move.size = srcAllocSize;
14409 moves.push_back(move);
// Case B: the gap is in an earlier block — move across blocks.
14416 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
14418 VmaSuballocation suballoc = *srcSuballocIt;
14419 suballoc.offset = dstAllocOffset;
14420 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
14421 m_BytesMoved += srcAllocSize;
14422 ++m_AllocationsMoved;
14424 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14426 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14427 srcSuballocIt = nextSuballocIt;
14429 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14431 move.srcBlockIndex = srcOrigBlockIndex;
14432 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14433 move.srcOffset = srcAllocOffset;
14434 move.dstOffset = dstAllocOffset;
14435 move.size = srcAllocSize;
14437 moves.push_back(move);
// No reusable gap: place at the current destination cursor.
14442 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance the destination cursor to the next block while the
// allocation doesn't fit; register the leftover tail as free space.
14445 while(dstBlockInfoIndex < srcBlockInfoIndex &&
14446 dstAllocOffset + srcAllocSize > dstBlockSize)
14449 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
14451 ++dstBlockInfoIndex;
14452 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14453 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14454 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14455 dstBlockSize = pDstMetadata->GetSize();
14457 dstAllocOffset = 0;
// Same-block destination: may overlap with the source region.
14461 if(dstBlockInfoIndex == srcBlockInfoIndex)
14463 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14465 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
14467 bool skipOver = overlap;
14468 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Skip the move when the gain (offset delta) is tiny relative
// to the allocation size (< 1/64) — not worth the copy.
14472 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
// Skipping: remember the gap before this allocation, keep it in place.
14477 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
14479 dstOffset = srcAllocOffset + srcAllocSize;
// Moving within the same block: just rewrite the offset.
14485 srcSuballocIt->offset = dstAllocOffset;
14486 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
14487 dstOffset = dstAllocOffset + srcAllocSize;
14488 m_BytesMoved += srcAllocSize;
14489 ++m_AllocationsMoved;
14492 move.srcBlockIndex = srcOrigBlockIndex;
14493 move.dstBlockIndex = dstOrigBlockIndex;
14494 move.srcOffset = srcAllocOffset;
14495 move.dstOffset = dstAllocOffset;
14496 move.size = srcAllocSize;
14498 moves.push_back(move);
// Cross-block destination (earlier block): relocate the suballocation.
14506 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
14507 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
14509 VmaSuballocation suballoc = *srcSuballocIt;
14510 suballoc.offset = dstAllocOffset;
14511 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
14512 dstOffset = dstAllocOffset + srcAllocSize;
14513 m_BytesMoved += srcAllocSize;
14514 ++m_AllocationsMoved;
14516 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14518 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14519 srcSuballocIt = nextSuballocIt;
// Destination cursor only moves forward, so push_back keeps order.
14521 pDstMetadata->m_Suballocations.push_back(suballoc);
14523 move.srcBlockIndex = srcOrigBlockIndex;
14524 move.dstBlockIndex = dstOrigBlockIndex;
14525 move.srcOffset = srcAllocOffset;
14526 move.dstOffset = dstAllocOffset;
14527 move.size = srcAllocSize;
14529 moves.push_back(move);
14535 m_BlockInfos.clear();
// Rebuild free-suballocation bookkeeping invalidated by the pass above.
14537 PostprocessMetadata();
// Prepares every block's metadata for the fast pass: resets free-space
// counters and removes all FREE suballocations from the lists, leaving only
// used entries (free space is re-derived in PostprocessMetadata()).
14542 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
14544 const size_t blockCount = m_pBlockVector->GetBlockCount();
14545 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14547 VmaBlockMetadata_Generic*
const pMetadata =
14548 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
// Start from "everything free"; used sizes are subtracted later.
14549 pMetadata->m_FreeCount = 0;
14550 pMetadata->m_SumFreeSize = pMetadata->GetSize();
14551 pMetadata->m_FreeSuballocationsBySize.clear();
14552 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14553 it != pMetadata->m_Suballocations.end(); )
// Drop FREE entries; the advance of `it` past erased nodes is on
// lines elided from this excerpt.
14555 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
14557 VmaSuballocationList::iterator nextIt = it;
14559 pMetadata->m_Suballocations.erase(it);
// Rebuilds each block's free-space bookkeeping after the fast pass:
// re-inserts FREE suballocations into the gaps between (sorted) used entries,
// recomputes m_FreeCount / m_SumFreeSize, and re-registers free entries in
// m_FreeSuballocationsBySize (re-sorted at the end).
14570 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
14572 const size_t blockCount = m_pBlockVector->GetBlockCount();
14573 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14575 VmaBlockMetadata_Generic*
const pMetadata =
14576 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14577 const VkDeviceSize blockSize = pMetadata->GetSize();
// Empty block: becomes one single FREE suballocation covering it all.
14580 if(pMetadata->m_Suballocations.empty())
14582 pMetadata->m_FreeCount = 1;
14584 VmaSuballocation suballoc = {
14588 VMA_SUBALLOCATION_TYPE_FREE };
14589 pMetadata->m_Suballocations.push_back(suballoc);
14590 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Non-empty block: walk used entries in offset order, filling gaps.
14595 VkDeviceSize offset = 0;
14596 VmaSuballocationList::iterator it;
14597 for(it = pMetadata->m_Suballocations.begin();
14598 it != pMetadata->m_Suballocations.end();
// After PreprocessMetadata() only used entries remain, in order.
14601 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
14602 VMA_ASSERT(it->offset >= offset);
// Gap before this entry: insert a FREE suballocation for it.
14605 if(it->offset > offset)
14607 ++pMetadata->m_FreeCount;
14608 const VkDeviceSize freeSize = it->offset - offset;
14609 VmaSuballocation suballoc = {
14613 VMA_SUBALLOCATION_TYPE_FREE };
14614 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
14615 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14617 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
14621 pMetadata->m_SumFreeSize -= it->size;
14622 offset = it->offset + it->size;
// Trailing gap after the last used entry, up to the block end.
14626 if(offset < blockSize)
14628 ++pMetadata->m_FreeCount;
14629 const VkDeviceSize freeSize = blockSize - offset;
14630 VmaSuballocation suballoc = {
14634 VMA_SUBALLOCATION_TYPE_FREE };
14635 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
14636 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): uses '>' here but '>=' at the preceding-gap case
// (line 14615) — looks inconsistent; confirm intended threshold.
14637 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14639 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
// Restore the by-size ordering invariant of the free list.
14644 pMetadata->m_FreeSuballocationsBySize.begin(),
14645 pMetadata->m_FreeSuballocationsBySize.end(),
14646 VmaSuballocationItemSizeLess());
14649 VMA_HEAVY_ASSERT(pMetadata->Validate());
// Inserts `suballoc` into the metadata's suballocation list, keeping the
// list sorted by offset (linear scan for the first entry not below it).
14653 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
14656 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14657 while(it != pMetadata->m_Suballocations.end())
// Advance while entries still precede the new offset (the increment
// falls on a line elided from this excerpt).
14659 if(it->offset < suballoc.offset)
14664 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context. Holds the chosen algorithm
// (created lazily in Begin()), the explicitly registered allocations, and
// the incremental-defragmentation bookkeeping (moves processed/committed).
14670 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
14673 VmaBlockVector* pBlockVector,
14674 uint32_t currFrameIndex) :
14676 mutexLocked(false),
14677 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
14678 defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
14679 defragmentationMovesProcessed(0),
14680 defragmentationMovesCommitted(0),
14681 hasDefragmentationPlan(0),
14682 m_hAllocator(hAllocator),
14683 m_hCustomPool(hCustomPool),
14684 m_pBlockVector(pBlockVector),
14685 m_CurrFrameIndex(currFrameIndex),
// Algorithm instance is created in Begin(), not here.
14686 m_pAlgorithm(VMA_NULL),
14687 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
14688 m_AllAllocations(false)
// Destroys the lazily created algorithm object (vma_delete(VMA_NULL) is safe).
14692 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
14694 vma_delete(m_hAllocator, m_pAlgorithm);
// Registers a single allocation for defragmentation; pChanged (optional)
// is where the caller will be told whether the allocation was moved.
14697 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
14699 AllocInfo info = { hAlloc, pChanged };
14700 m_Allocations.push_back(info);
// Chooses and instantiates the defragmentation algorithm, then feeds it the
// allocations. The Fast algorithm is eligible only when debug margins are off
// and no buffer/image granularity conflicts are possible (further conditions
// fall on lines elided from this excerpt); otherwise Generic is used.
14703 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported,
VmaDefragmentationFlags flags)
// "All allocations" also holds when every allocation was added explicitly.
14705 const bool allAllocations = m_AllAllocations ||
14706 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
14719 if(VMA_DEBUG_MARGIN == 0 &&
14721 !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
14724 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
14725 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
14729 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
14730 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
// Either register everything in bulk, or the explicit list one by one.
14735 m_pAlgorithm->AddAll();
14739 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
14741 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context spanning all memory types and custom
// pools. Default-pool contexts live in a fixed array (zeroed here); custom
// pool contexts are collected in a growable vector.
14749 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
14751 uint32_t currFrameIndex,
14754 m_hAllocator(hAllocator),
14755 m_CurrFrameIndex(currFrameIndex),
14758 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
// One slot per memory type; VMA_NULL means "no context created yet".
14760 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Finalizes and destroys every per-block-vector context: custom pools first,
// then the per-memory-type default pools. DefragmentationEnd() flushes
// results/stats before each context is deleted.
14763 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
14765 for(
size_t i = m_CustomPoolContexts.size(); i--; )
14767 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
14768 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
14769 vma_delete(m_hAllocator, pBlockVectorCtx);
14771 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
14773 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
// Default-pool slots are created lazily, so they may be null.
14774 if(pBlockVectorCtx)
14776 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
14777 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation. Pools using a non-default
// algorithm (linear/buddy) are skipped. A context per pool is created at most
// once (linear search over the existing contexts), then AddAll() marks every
// allocation in the pool for processing.
14782 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
const VmaPool* pPools)
14784 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
14786 VmaPool pool = pPools[poolIndex];
// Only pools with the default (generic) algorithm are defragmentable.
14789 if(pool->m_BlockVector.GetAlgorithm() == 0)
14791 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Reuse an existing context for this pool if one was already made.
14793 for(
size_t i = m_CustomPoolContexts.size(); i--; )
14795 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
14797 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
14802 if(!pBlockVectorDefragCtx)
14804 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
14807 &pool->m_BlockVector,
14809 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
14812 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations. Dedicated and lost allocations are
// skipped. Each allocation is routed to the context of its owning custom
// pool (created on demand) or to the per-memory-type default-pool context.
14817 void VmaDefragmentationContext_T::AddAllocations(
14818 uint32_t allocationCount,
14820 VkBool32* pAllocationsChanged)
14823 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14826 VMA_ASSERT(hAlloc);
// Only block-based, non-lost allocations can be defragmented.
14828 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
14830 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
14832 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
14834 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation belongs to a custom pool.
14836 if(hAllocPool != VK_NULL_HANDLE)
// Non-default pool algorithms are not defragmentable.
14839 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
14841 for(
size_t i = m_CustomPoolContexts.size(); i--; )
14843 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
14845 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
14849 if(!pBlockVectorDefragCtx)
14851 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
14854 &hAllocPool->m_BlockVector,
14856 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to a default pool, keyed by memory type index.
14863 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
14864 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
14865 if(!pBlockVectorDefragCtx)
14867 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
14870 m_hAllocator->m_pBlockVectors[memTypeIndex],
14872 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
// Route to a changed-flag slot only when the caller provided the array.
14876 if(pBlockVectorDefragCtx)
14878 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
14879 &pAllocationsChanged[allocIndex] : VMA_NULL;
14880 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Runs (non-incremental) defragmentation over all registered contexts,
// bounded separately for CPU-side and GPU-side (command-buffer) transfers.
// Returns VK_NOT_READY when all budgets are zero; otherwise aggregates the
// per-context results.
14886 VkResult VmaDefragmentationContext_T::Defragment(
14887 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
14888 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// Budgets are stashed for a possible later incremental pass.
14900 m_MaxCpuBytesToMove = maxCpuBytesToMove;
14901 m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
14903 m_MaxGpuBytesToMove = maxGpuBytesToMove;
14904 m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;
14906 if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
14907 m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
14910 return VK_NOT_READY;
// Without a command buffer, GPU-side moves are impossible.
14913 if(commandBuffer == VK_NULL_HANDLE)
14915 maxGpuBytesToMove = 0;
14916 maxGpuAllocationsToMove = 0;
14919 VkResult res = VK_SUCCESS;
// Default pools first (one per memory type), stopping on the first error.
14922 for(uint32_t memTypeIndex = 0;
14923 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
14926 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
14927 if(pBlockVectorCtx)
14929 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
14930 pBlockVectorCtx->GetBlockVector()->Defragment(
14933 maxCpuBytesToMove, maxCpuAllocationsToMove,
14934 maxGpuBytesToMove, maxGpuAllocationsToMove,
14936 if(pBlockVectorCtx->res != VK_SUCCESS)
14938 res = pBlockVectorCtx->res;
// Then custom pools, same stop-on-error policy.
14944 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
14945 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
14948 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
14949 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
14950 pBlockVectorCtx->GetBlockVector()->Defragment(
14953 maxCpuBytesToMove, maxCpuAllocationsToMove,
14954 maxGpuBytesToMove, maxGpuAllocationsToMove,
14956 if(pBlockVectorCtx->res != VK_SUCCESS)
14958 res = pBlockVectorCtx->res;
// NOTE(review): this span is the body of the incremental pass-begin routine
// (presumably VmaDefragmentationContext_T::DefragmentPassBegin); its signature
// falls on lines elided from this excerpt. For each context it lazily builds
// a defragmentation plan, then drains up to `movesLeft` pending moves into
// the caller's pCurrentMove array.
14971 for(uint32_t memTypeIndex = 0;
14972 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
14975 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
14976 if(pBlockVectorCtx)
14978 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
// First pass for this context: compute the plan using stored budgets.
14980 if(!pBlockVectorCtx->hasDefragmentationPlan)
14982 pBlockVectorCtx->GetBlockVector()->Defragment(
14985 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
14986 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
14989 if(pBlockVectorCtx->res < VK_SUCCESS)
14992 pBlockVectorCtx->hasDefragmentationPlan =
true;
14995 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
14997 pCurrentMove, movesLeft);
14999 movesLeft -= processed;
15000 pCurrentMove += processed;
// Same procedure for every custom-pool context.
15005 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15006 customCtxIndex < customCtxCount;
15009 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15010 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15012 if(!pBlockVectorCtx->hasDefragmentationPlan)
15014 pBlockVectorCtx->GetBlockVector()->Defragment(
15017 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15018 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
15021 if(pBlockVectorCtx->res < VK_SUCCESS)
15024 pBlockVectorCtx->hasDefragmentationPlan =
true;
15027 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15029 pCurrentMove, movesLeft);
15031 movesLeft -= processed;
15032 pCurrentMove += processed;
// Ends one incremental defragmentation pass: commits completed moves in every
// context. Returns VK_NOT_READY while any context still lacks a plan or has
// uncommitted moves remaining, VK_SUCCESS once everything is committed.
15039 VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
15041 VkResult res = VK_SUCCESS;
// Default-pool contexts (one per memory type).
15044 for(uint32_t memTypeIndex = 0;
15045 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15048 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15049 if(pBlockVectorCtx)
15051 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
// No plan yet: DefragmentPassBegin was never run for this context.
15053 if(!pBlockVectorCtx->hasDefragmentationPlan)
15055 res = VK_NOT_READY;
15059 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15060 pBlockVectorCtx, m_pStats);
// More moves pending than committed: another pass is needed.
15062 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15063 res = VK_NOT_READY;
// Custom-pool contexts, same policy.
15068 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15069 customCtxIndex < customCtxCount;
15072 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15073 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15075 if(!pBlockVectorCtx->hasDefragmentationPlan)
15077 res = VK_NOT_READY;
15081 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15082 pBlockVectorCtx, m_pStats);
15084 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15085 res = VK_NOT_READY;
15094 #if VMA_RECORDING_ENABLED
// VmaRecorder constructor: captures the recording start timestamp used to
// compute relative call times.
15096 VmaRecorder::VmaRecorder() :
15100 m_RecordingStartTime(std::chrono::high_resolution_clock::now())
// NOTE(review): the lines below are the body of the recorder's init routine
// (presumably VmaRecorder::Init); its signature is elided from this excerpt.
// Opens the CSV output file in binary mode ("wb") — fopen_s on Windows,
// fopen elsewhere — and writes the two-line file header (magic + version).
15106 m_UseMutex = useMutex;
15107 m_Flags = settings.
flags;
15109 #if defined(_WIN32)
15111 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
15115 return VK_ERROR_INITIALIZATION_FAILED;
15119 m_File = fopen(settings.
pFilePath,
"wb");
15123 return VK_ERROR_INITIALIZATION_FAILED;
// File format header: magic line then format version.
15128 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
15129 fprintf(m_File,
"%s\n",
"1,8");
// Destructor: closes the recording file if it was successfully opened
// (the fclose call falls on a line elided from this excerpt).
15134 VmaRecorder::~VmaRecorder()
15136 if(m_File != VMA_NULL)
// Appends one CSV line recording a vmaCreateAllocator call
// (thread id, relative time, frame index); file access is mutex-guarded.
15142 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
15144 CallParams callParams;
15145 GetBasicParams(callParams);
15147 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15148 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Appends one CSV line recording a vmaDestroyAllocator call.
15152 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
15154 CallParams callParams;
15155 GetBasicParams(callParams);
15157 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15158 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// NOTE(review): body of the vmaCreatePool recording routine — its signature
// is elided from this excerpt. Records the pool create-info fields and the
// resulting pool handle as one CSV line.
15164 CallParams callParams;
15165 GetBasicParams(callParams);
15167 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15168 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one CSV line recording a vmaDestroyPool call (pool handle as %p).
15179 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
15181 CallParams callParams;
15182 GetBasicParams(callParams);
15184 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15185 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaAllocateMemory call: memory requirements, create-info fields,
// the resulting allocation handle, and the (possibly stringified) user data.
15190 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
15191 const VkMemoryRequirements& vkMemReq,
15195 CallParams callParams;
15196 GetBasicParams(callParams);
15198 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// UserDataString renders pUserData as a string or pointer for the log.
15199 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15200 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15202 vkMemReq.alignment,
15203 vkMemReq.memoryTypeBits,
15211 userDataStr.GetString());
// Records a vmaAllocateMemoryPages call. The variable-length list of
// resulting allocation handles is emitted via PrintPointerList between the
// fixed prefix and the trailing user-data column.
15215 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
15216 const VkMemoryRequirements& vkMemReq,
15218 uint64_t allocationCount,
15221 CallParams callParams;
15222 GetBasicParams(callParams);
15224 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15225 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15226 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
15228 vkMemReq.alignment,
15229 vkMemReq.memoryTypeBits,
15236 PrintPointerList(allocationCount, pAllocations);
15237 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Records a vmaAllocateMemoryForBuffer call, including the dedicated-
// allocation hints (encoded as 0/1) alongside the memory requirements.
15241 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
15242 const VkMemoryRequirements& vkMemReq,
15243 bool requiresDedicatedAllocation,
15244 bool prefersDedicatedAllocation,
15248 CallParams callParams;
15249 GetBasicParams(callParams);
15251 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15252 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15253 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15255 vkMemReq.alignment,
15256 vkMemReq.memoryTypeBits,
15257 requiresDedicatedAllocation ? 1 : 0,
15258 prefersDedicatedAllocation ? 1 : 0,
15266 userDataStr.GetString());
// Records a vmaAllocateMemoryForImage call — same layout as the buffer
// variant above, differing only in the operation name.
15270 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
15271 const VkMemoryRequirements& vkMemReq,
15272 bool requiresDedicatedAllocation,
15273 bool prefersDedicatedAllocation,
15277 CallParams callParams;
15278 GetBasicParams(callParams);
15280 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15281 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15282 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15284 vkMemReq.alignment,
15285 vkMemReq.memoryTypeBits,
15286 requiresDedicatedAllocation ? 1 : 0,
15287 prefersDedicatedAllocation ? 1 : 0,
15295 userDataStr.GetString());
// Appends one CSV line recording a vmaFreeMemory call.
15299 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
15302 CallParams callParams;
15303 GetBasicParams(callParams);
15305 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15306 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaFreeMemoryPages call; the allocation handles are written as
// a pointer list after the fixed prefix.
15311 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
15312 uint64_t allocationCount,
15315 CallParams callParams;
15316 GetBasicParams(callParams);
15318 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15319 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
15320 PrintPointerList(allocationCount, pAllocations);
15321 fprintf(m_File,
"\n");
// Records a vmaSetAllocationUserData call with the stringified user data.
15325 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
15327 const void* pUserData)
15329 CallParams callParams;
15330 GetBasicParams(callParams);
15332 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15333 UserDataString userDataStr(
15336 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15338 userDataStr.GetString());
// Appends one CSV line recording a vmaCreateLostAllocation call.
15342 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
15345 CallParams callParams;
15346 GetBasicParams(callParams);
15348 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15349 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one CSV line recording a vmaMapMemory call.
15354 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
15357 CallParams callParams;
15358 GetBasicParams(callParams);
15360 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15361 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one CSV line recording a vmaUnmapMemory call.
15366 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
15369 CallParams callParams;
15370 GetBasicParams(callParams);
15372 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15373 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaFlushAllocation call with its offset and size (as %llu).
15378 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
15379 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15381 CallParams callParams;
15382 GetBasicParams(callParams);
15384 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15385 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaInvalidateAllocation call with its offset and size.
15392 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
15393 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15395 CallParams callParams;
15396 GetBasicParams(callParams);
15398 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15399 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaCreateBuffer call: the VkBufferCreateInfo fields, the
// allocation create-info fields, the pool handle, and the user data.
15406 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
15407 const VkBufferCreateInfo& bufCreateInfo,
15411 CallParams callParams;
15412 GetBasicParams(callParams);
15414 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15415 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15416 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15417 bufCreateInfo.flags,
15418 bufCreateInfo.size,
15419 bufCreateInfo.usage,
15420 bufCreateInfo.sharingMode,
15421 allocCreateInfo.
flags,
15422 allocCreateInfo.
usage,
15426 allocCreateInfo.
pool,
15428 userDataStr.GetString());
// Records a vmaCreateImage call: the full VkImageCreateInfo (type, format,
// extent, mips, layers, samples, tiling, usage, sharing, layout), the
// allocation create-info fields, the pool handle, and the user data.
15432 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
15433 const VkImageCreateInfo& imageCreateInfo,
15437 CallParams callParams;
15438 GetBasicParams(callParams);
15440 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15441 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15442 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15443 imageCreateInfo.flags,
15444 imageCreateInfo.imageType,
15445 imageCreateInfo.format,
15446 imageCreateInfo.extent.width,
15447 imageCreateInfo.extent.height,
15448 imageCreateInfo.extent.depth,
15449 imageCreateInfo.mipLevels,
15450 imageCreateInfo.arrayLayers,
15451 imageCreateInfo.samples,
15452 imageCreateInfo.tiling,
15453 imageCreateInfo.usage,
15454 imageCreateInfo.sharingMode,
15455 imageCreateInfo.initialLayout,
15456 allocCreateInfo.
flags,
15457 allocCreateInfo.
usage,
15461 allocCreateInfo.
pool,
15463 userDataStr.GetString());
// Appends one CSV line recording a vmaDestroyBuffer call.
15467 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
15470 CallParams callParams;
15471 GetBasicParams(callParams);
15473 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15474 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one CSV line recording a vmaDestroyImage call.
15479 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
15482 CallParams callParams;
15483 GetBasicParams(callParams);
15485 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15486 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one CSV line recording a vmaTouchAllocation call.
15491 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
15494 CallParams callParams;
15495 GetBasicParams(callParams);
15497 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15498 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one CSV line recording a vmaGetAllocationInfo call.
15503 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
15506 CallParams callParams;
15507 GetBasicParams(callParams);
15509 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15510 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one CSV line recording a vmaMakePoolAllocationsLost call.
15515 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
15518 CallParams callParams;
15519 GetBasicParams(callParams);
15521 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15522 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaDefragmentationBegin call. Emitted in three fprintf steps:
// fixed prefix, a pointer list for each allocation array (the PrintPointerList
// calls fall on lines elided from this excerpt), then the numeric limits and
// handles.
15527 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
15531 CallParams callParams;
15532 GetBasicParams(callParams);
15534 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15535 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
15538 fprintf(m_File,
",");
15540 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Appends one CSV line recording a vmaDefragmentationEnd call.
15550 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
15553 CallParams callParams;
15554 GetBasicParams(callParams);
15556 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15557 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaSetPoolName call; a null name is written as an empty string.
15562 void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
15566 CallParams callParams;
15567 GetBasicParams(callParams);
15569 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15570 fprintf(m_File,
"%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15571 pool, name != VMA_NULL ? name :
"");
// NOTE(review): body of the UserDataString helper constructor — its signature
// is elided from this excerpt. When user data is present it is logged either
// as the string it points to (string flag set, per the elided branch) or as a
// pointer value formatted into the fixed-size m_PtrStr buffer.
15577 if(pUserData != VMA_NULL)
15581 m_Str = (
const char*)pUserData;
// 17 chars: "0x" + up to 16 hex digits fits a 64-bit pointer + NUL
// is handled by snprintf truncation — TODO confirm intended width.
15586 snprintf(m_PtrStr, 17,
"%p", pUserData);
15596 void VmaRecorder::WriteConfiguration(
15597 const VkPhysicalDeviceProperties& devProps,
15598 const VkPhysicalDeviceMemoryProperties& memProps,
15599 uint32_t vulkanApiVersion,
15600 bool dedicatedAllocationExtensionEnabled,
15601 bool bindMemory2ExtensionEnabled,
15602 bool memoryBudgetExtensionEnabled,
15603 bool deviceCoherentMemoryExtensionEnabled)
15605 fprintf(m_File,
"Config,Begin\n");
15607 fprintf(m_File,
"VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));
15609 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
15610 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
15611 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
15612 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
15613 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
15614 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
15616 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
15617 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
15618 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
15620 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
15621 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
15623 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
15624 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
15626 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
15627 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
15629 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
15630 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
15633 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
15634 fprintf(m_File,
"Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
15635 fprintf(m_File,
"Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
15636 fprintf(m_File,
"Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);
15638 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
15639 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
15640 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
15641 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
15642 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
15643 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
15644 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
15645 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
15646 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
15648 fprintf(m_File,
"Config,End\n");
15651 void VmaRecorder::GetBasicParams(CallParams& outParams)
15653 #if defined(_WIN32)
15654 outParams.threadId = GetCurrentThreadId();
15659 std::thread::id thread_id = std::this_thread::get_id();
15660 std::stringstream thread_id_to_string_converter;
15661 thread_id_to_string_converter << thread_id;
15662 std::string thread_id_as_string = thread_id_to_string_converter.str();
15663 outParams.threadId =
static_cast<uint32_t
>(std::stoi(thread_id_as_string.c_str()));
15666 auto current_time = std::chrono::high_resolution_clock::now();
15668 outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
15671 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
15675 fprintf(m_File,
"%p", pItems[0]);
15676 for(uint64_t i = 1; i < count; ++i)
15678 fprintf(m_File,
" %p", pItems[i]);
15683 void VmaRecorder::Flush()
15691 #endif // #if VMA_RECORDING_ENABLED
15696 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
15697 m_Allocator(pAllocationCallbacks, 1024)
15701 template<
typename... Types>
VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
15703 VmaMutexLock mutexLock(m_Mutex);
15704 return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
15707 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
15709 VmaMutexLock mutexLock(m_Mutex);
15710 m_Allocator.Free(hAlloc);
// NOTE(review): extraction-damaged fragment of the VmaAllocator_T constructor.
// The constructor's signature, brace lines and several member initializers /
// flag checks were lost; each surviving line still carries its original file
// line number. Code left byte-identical; comments only.
//
// Member-initializer list: copies handles and allocation callbacks from
// pCreateInfo and zero-initializes bookkeeping members.
15718 m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
15725 m_hDevice(pCreateInfo->device),
15726 m_hInstance(pCreateInfo->instance),
15727 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
15728 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
15729 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
15730 m_AllocationObjectAllocator(&m_AllocationCallbacks),
15731 m_HeapSizeLimitMask(0),
15732 m_PreferredLargeHeapBlockSize(0),
15733 m_PhysicalDevice(pCreateInfo->physicalDevice),
15734 m_CurrentFrameIndex(0),
15735 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
15736 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
15738 m_GlobalMemoryTypeBits(UINT32_MAX)
15740 ,m_pRecorder(VMA_NULL)
// On Vulkan >= 1.1 the dedicated-allocation and bind-memory2 KHR extensions
// are core, so the "use extension" flags are cleared.
15743 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
15745 m_UseKhrDedicatedAllocation =
false;
15746 m_UseKhrBindMemory2 =
false;
15749 if(VMA_DEBUG_DETECT_CORRUPTION)
// Corruption detection writes uint32_t markers into the debug margin,
// so the margin must be a multiple of 4 bytes.
15752 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Validation of requested flags against compile-time feature macros and the
// requested Vulkan API version. Each assert fires when the user requests a
// feature that was disabled by preprocessor configuration.
15757 if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
15759 #if !(VMA_DEDICATED_ALLOCATION)
15762 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
15765 #if !(VMA_BIND_MEMORY2)
15768 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
15772 #if !(VMA_MEMORY_BUDGET)
15775 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
15778 #if !(VMA_BUFFER_DEVICE_ADDRESS)
15779 if(m_UseKhrBufferDeviceAddress)
15781 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
15784 #if VMA_VULKAN_VERSION < 1002000
15785 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
15787 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
15790 #if VMA_VULKAN_VERSION < 1001000
15791 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
15793 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
15796 #if !(VMA_MEMORY_PRIORITY)
15797 if(m_UseExtMemoryPriority)
15799 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.")
// Zero all POD members and lookup tables before any of them is used.
15803 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
15804 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
15805 memset(&m_MemProps, 0,
sizeof(m_MemProps));
15807 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
15808 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
15809 memset(&m_VulkanFunctions, 0,
sizeof(m_VulkanFunctions));
// Query device properties and memory properties through the (already
// imported) function pointers, then sanity-check alignment values.
15820 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
15821 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
15823 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
15824 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
15825 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
15826 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
15831 m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
// Apply user-provided per-heap size limits (presumably guarded by a
// pHeapSizeLimit != VMA_NULL check on a lost line — TODO confirm).
15835 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
15837 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
15838 if(limit != VK_WHOLE_SIZE)
15840 m_HeapSizeLimitMask |= 1u << heapIndex;
15841 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
15843 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create one default block vector and one dedicated-allocation list per
// memory type (several constructor arguments lost in extraction).
15849 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15851 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
15853 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
15857 preferredBlockSize,
15860 GetBufferImageGranularity(),
15867 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// NOTE(review): extraction-damaged fragment — presumably the body of
// VmaAllocator_T::Init(const VmaAllocatorCreateInfo*); its signature and
// several lines (the pRecordSettings check, the recorder Init call, braces)
// were lost. Code left byte-identical; comments only.
15874 VkResult res = VK_SUCCESS;
// When recording was requested: create the recorder, write the configuration
// header and log the "create allocator" event; otherwise fail with
// FEATURE_NOT_PRESENT because recording support was compiled out.
15879 #if VMA_RECORDING_ENABLED
15880 m_pRecorder = vma_new(
this, VmaRecorder)();
15882 if(res != VK_SUCCESS)
15886 m_pRecorder->WriteConfiguration(
15887 m_PhysicalDeviceProperties,
15889 m_VulkanApiVersion,
15890 m_UseKhrDedicatedAllocation,
15891 m_UseKhrBindMemory2,
15892 m_UseExtMemoryBudget,
15893 m_UseAmdDeviceCoherentMemory);
15894 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
15896 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
15897 return VK_ERROR_FEATURE_NOT_PRESENT;
// Fetch the initial budget from VK_EXT_memory_budget when it is in use.
15901 #if VMA_MEMORY_BUDGET
15902 if(m_UseExtMemoryBudget)
15904 UpdateVulkanBudget();
15906 #endif // #if VMA_MEMORY_BUDGET
15911 VmaAllocator_T::~VmaAllocator_T()
15913 #if VMA_RECORDING_ENABLED
15914 if(m_pRecorder != VMA_NULL)
15916 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
15917 vma_delete(
this, m_pRecorder);
15921 VMA_ASSERT(m_Pools.empty());
15923 for(
size_t i = GetMemoryTypeCount(); i--; )
15925 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
15927 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
15930 vma_delete(
this, m_pDedicatedAllocations[i]);
15931 vma_delete(
this, m_pBlockVectors[i]);
15935 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
15937 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
15938 ImportVulkanFunctions_Static();
15941 if(pVulkanFunctions != VMA_NULL)
15943 ImportVulkanFunctions_Custom(pVulkanFunctions);
15946 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
15947 ImportVulkanFunctions_Dynamic();
15950 ValidateVulkanFunctions();
#if VMA_STATIC_VULKAN_FUNCTIONS == 1

// Fills m_VulkanFunctions from the statically linked Vulkan entry points.
// Vulkan 1.1 functions are only taken when the application actually requested
// an API version >= 1.1.
void VmaAllocator_T::ImportVulkanFunctions_Static()
{
    // Vulkan 1.0
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;

    // Vulkan 1.1 — the 2/2KHR entry points are core in 1.1.
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
        m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
        m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
    }
#endif
}

#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
15991 void VmaAllocator_T::ImportVulkanFunctions_Custom(
const VmaVulkanFunctions* pVulkanFunctions)
15993 VMA_ASSERT(pVulkanFunctions != VMA_NULL);
15995 #define VMA_COPY_IF_NOT_NULL(funcName) \
15996 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
15998 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
15999 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
16000 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
16001 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
16002 VMA_COPY_IF_NOT_NULL(vkMapMemory);
16003 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
16004 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
16005 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
16006 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
16007 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
16008 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
16009 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
16010 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
16011 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
16012 VMA_COPY_IF_NOT_NULL(vkCreateImage);
16013 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
16014 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
16016 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16017 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
16018 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
16021 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16022 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
16023 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
16026 #if VMA_MEMORY_BUDGET
16027 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
16030 #undef VMA_COPY_IF_NOT_NULL
#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1

// Fetches any function pointer still null via vkGetInstanceProcAddr /
// vkGetDeviceProcAddr, so this never overrides statically imported or
// user-provided pointers. Extension-suffixed (*KHR) names are fetched only
// when the corresponding extension is enabled; core 1.1 names when the API
// version allows.
void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
{
#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);

    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
    VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
    VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
    VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
    VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
    VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
    VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
    VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
    VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
    VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
    VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
    VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");

#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        // Core 1.1 names (no KHR suffix).
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2");
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif

#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
    }
#endif

#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
    }
#endif // #if VMA_BIND_MEMORY2

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif // #if VMA_MEMORY_BUDGET

#undef VMA_FETCH_DEVICE_FUNC
#undef VMA_FETCH_INSTANCE_FUNC
}

#endif // #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
16104 void VmaAllocator_T::ValidateVulkanFunctions()
16106 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
16107 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
16108 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
16109 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
16110 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
16111 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
16112 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
16113 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
16114 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
16115 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
16116 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
16117 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
16118 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
16119 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
16120 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
16121 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
16122 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
16124 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16125 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
16127 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
16128 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
16132 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16133 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
16135 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
16136 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
16140 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
16141 if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16143 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
16148 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
16150 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16151 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
16152 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
16153 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
16156 VkResult VmaAllocator_T::AllocateMemoryOfType(
16158 VkDeviceSize alignment,
16159 bool dedicatedAllocation,
16160 VkBuffer dedicatedBuffer,
16161 VkBufferUsageFlags dedicatedBufferUsage,
16162 VkImage dedicatedImage,
16164 uint32_t memTypeIndex,
16165 VmaSuballocationType suballocType,
16166 size_t allocationCount,
16169 VMA_ASSERT(pAllocations != VMA_NULL);
16170 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
16176 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16186 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
16187 VMA_ASSERT(blockVector);
16189 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
16190 bool preferDedicatedMemory =
16191 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
16192 dedicatedAllocation ||
16194 size > preferredBlockSize / 2;
16196 if(preferDedicatedMemory &&
16198 finalCreateInfo.
pool == VK_NULL_HANDLE)
16207 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16211 return AllocateDedicatedMemory(
16221 dedicatedBufferUsage,
16229 VkResult res = blockVector->Allocate(
16230 m_CurrentFrameIndex.load(),
16237 if(res == VK_SUCCESS)
16245 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16249 res = AllocateDedicatedMemory(
16259 dedicatedBufferUsage,
16263 if(res == VK_SUCCESS)
16266 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
16272 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
16279 VkResult VmaAllocator_T::AllocateDedicatedMemory(
16281 VmaSuballocationType suballocType,
16282 uint32_t memTypeIndex,
16285 bool isUserDataString,
16288 VkBuffer dedicatedBuffer,
16289 VkBufferUsageFlags dedicatedBufferUsage,
16290 VkImage dedicatedImage,
16291 size_t allocationCount,
16294 VMA_ASSERT(allocationCount > 0 && pAllocations);
16298 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16300 GetBudget(&heapBudget, heapIndex, 1);
16301 if(heapBudget.
usage + size * allocationCount > heapBudget.
budget)
16303 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16307 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
16308 allocInfo.memoryTypeIndex = memTypeIndex;
16309 allocInfo.allocationSize = size;
16311 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16312 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
16313 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16315 if(dedicatedBuffer != VK_NULL_HANDLE)
16317 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
16318 dedicatedAllocInfo.buffer = dedicatedBuffer;
16319 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
16321 else if(dedicatedImage != VK_NULL_HANDLE)
16323 dedicatedAllocInfo.image = dedicatedImage;
16324 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
16327 #endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16329 #if VMA_BUFFER_DEVICE_ADDRESS
16330 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
16331 if(m_UseKhrBufferDeviceAddress)
16333 bool canContainBufferWithDeviceAddress =
true;
16334 if(dedicatedBuffer != VK_NULL_HANDLE)
16336 canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX ||
16337 (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
16339 else if(dedicatedImage != VK_NULL_HANDLE)
16341 canContainBufferWithDeviceAddress =
false;
16343 if(canContainBufferWithDeviceAddress)
16345 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
16346 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
16349 #endif // #if VMA_BUFFER_DEVICE_ADDRESS
16351 #if VMA_MEMORY_PRIORITY
16352 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
16353 if(m_UseExtMemoryPriority)
16355 priorityInfo.priority = priority;
16356 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
16358 #endif // #if VMA_MEMORY_PRIORITY
16361 VkResult res = VK_SUCCESS;
16362 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16364 res = AllocateDedicatedMemoryPage(
16372 pAllocations + allocIndex);
16373 if(res != VK_SUCCESS)
16379 if(res == VK_SUCCESS)
16383 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
16384 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
16385 VMA_ASSERT(pDedicatedAllocations);
16386 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16388 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
16392 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
16397 while(allocIndex--)
16400 VkDeviceMemory hMemory = currAlloc->GetMemory();
16412 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
16413 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
16414 currAlloc->SetUserData(
this, VMA_NULL);
16415 m_AllocationObjectAllocator.Free(currAlloc);
16418 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
16424 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
16426 VmaSuballocationType suballocType,
16427 uint32_t memTypeIndex,
16428 const VkMemoryAllocateInfo& allocInfo,
16430 bool isUserDataString,
16434 VkDeviceMemory hMemory = VK_NULL_HANDLE;
16435 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
16438 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
16442 void* pMappedData = VMA_NULL;
16445 res = (*m_VulkanFunctions.vkMapMemory)(
16454 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
16455 FreeVulkanMemory(memTypeIndex, size, hMemory);
16460 *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
16461 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
16462 (*pAllocation)->SetUserData(
this, pUserData);
16463 m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
16464 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
16466 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
16472 void VmaAllocator_T::GetBufferMemoryRequirements(
16474 VkMemoryRequirements& memReq,
16475 bool& requiresDedicatedAllocation,
16476 bool& prefersDedicatedAllocation)
const
16478 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16479 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16481 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
16482 memReqInfo.buffer = hBuffer;
16484 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16486 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16487 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16489 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16491 memReq = memReq2.memoryRequirements;
16492 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16493 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
16496 #endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16498 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
16499 requiresDedicatedAllocation =
false;
16500 prefersDedicatedAllocation =
false;
16504 void VmaAllocator_T::GetImageMemoryRequirements(
16506 VkMemoryRequirements& memReq,
16507 bool& requiresDedicatedAllocation,
16508 bool& prefersDedicatedAllocation)
const
16510 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16511 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16513 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
16514 memReqInfo.image = hImage;
16516 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16518 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16519 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16521 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16523 memReq = memReq2.memoryRequirements;
16524 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16525 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
16528 #endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16530 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
16531 requiresDedicatedAllocation =
false;
16532 prefersDedicatedAllocation =
false;
16536 VkResult VmaAllocator_T::AllocateMemory(
16537 const VkMemoryRequirements& vkMemReq,
16538 bool requiresDedicatedAllocation,
16539 bool prefersDedicatedAllocation,
16540 VkBuffer dedicatedBuffer,
16541 VkBufferUsageFlags dedicatedBufferUsage,
16542 VkImage dedicatedImage,
16544 VmaSuballocationType suballocType,
16545 size_t allocationCount,
16548 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
16550 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
16552 if(vkMemReq.size == 0)
16554 return VK_ERROR_VALIDATION_FAILED_EXT;
16559 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
16560 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16565 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
16566 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16568 if(requiresDedicatedAllocation)
16572 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
16573 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16575 if(createInfo.
pool != VK_NULL_HANDLE)
16577 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
16578 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16581 if((createInfo.
pool != VK_NULL_HANDLE) &&
16584 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
16585 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16588 if(createInfo.
pool != VK_NULL_HANDLE)
16590 const VkDeviceSize alignmentForPool = VMA_MAX(
16591 vkMemReq.alignment,
16592 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
16597 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16602 return createInfo.
pool->m_BlockVector.Allocate(
16603 m_CurrentFrameIndex.load(),
16614 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
16615 uint32_t memTypeIndex = UINT32_MAX;
16617 if(res == VK_SUCCESS)
16619 VkDeviceSize alignmentForMemType = VMA_MAX(
16620 vkMemReq.alignment,
16621 GetMemoryTypeMinAlignment(memTypeIndex));
16623 res = AllocateMemoryOfType(
16625 alignmentForMemType,
16626 requiresDedicatedAllocation || prefersDedicatedAllocation,
16628 dedicatedBufferUsage,
16636 if(res == VK_SUCCESS)
16646 memoryTypeBits &= ~(1u << memTypeIndex);
16649 if(res == VK_SUCCESS)
16651 alignmentForMemType = VMA_MAX(
16652 vkMemReq.alignment,
16653 GetMemoryTypeMinAlignment(memTypeIndex));
16655 res = AllocateMemoryOfType(
16657 alignmentForMemType,
16658 requiresDedicatedAllocation || prefersDedicatedAllocation,
16660 dedicatedBufferUsage,
16668 if(res == VK_SUCCESS)
16678 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16689 void VmaAllocator_T::FreeMemory(
16690 size_t allocationCount,
16693 VMA_ASSERT(pAllocations);
16695 for(
size_t allocIndex = allocationCount; allocIndex--; )
16699 if(allocation != VK_NULL_HANDLE)
16701 if(TouchAllocation(allocation))
16703 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
16705 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
16708 switch(allocation->GetType())
16710 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
16712 VmaBlockVector* pBlockVector = VMA_NULL;
16713 VmaPool hPool = allocation->GetBlock()->GetParentPool();
16714 if(hPool != VK_NULL_HANDLE)
16716 pBlockVector = &hPool->m_BlockVector;
16720 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
16721 pBlockVector = m_pBlockVectors[memTypeIndex];
16723 pBlockVector->Free(allocation);
16726 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
16727 FreeDedicatedMemory(allocation);
16735 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
16736 allocation->SetUserData(
this, VMA_NULL);
16737 m_AllocationObjectAllocator.Free(allocation);
16742 VkResult VmaAllocator_T::ResizeAllocation(
16744 VkDeviceSize newSize)
16747 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
16749 return VK_ERROR_VALIDATION_FAILED_EXT;
16751 if(newSize == alloc->GetSize())
16755 return VK_ERROR_OUT_OF_POOL_MEMORY;
16758 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
16761 InitStatInfo(pStats->
total);
16762 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
16764 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
16768 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16770 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
16771 VMA_ASSERT(pBlockVector);
16772 pBlockVector->AddStats(pStats);
16777 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
16778 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
16780 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
16785 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16787 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16788 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
16789 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
16790 VMA_ASSERT(pDedicatedAllocVector);
16791 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
16794 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
16795 VmaAddStatInfo(pStats->
total, allocationStatInfo);
16796 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
16797 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
16802 VmaPostprocessCalcStatInfo(pStats->
total);
16803 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
16804 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
16805 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
16806 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
16809 void VmaAllocator_T::GetBudget(
VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
16811 #if VMA_MEMORY_BUDGET
16812 if(m_UseExtMemoryBudget)
16814 if(m_Budget.m_OperationsSinceBudgetFetch < 30)
16816 VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
16817 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
16819 const uint32_t heapIndex = firstHeap + i;
16821 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
16824 if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->
blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
16826 outBudget->
usage = m_Budget.m_VulkanUsage[heapIndex] +
16827 outBudget->
blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
16831 outBudget->
usage = 0;
16835 outBudget->
budget = VMA_MIN(
16836 m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
16841 UpdateVulkanBudget();
16842 GetBudget(outBudget, firstHeap, heapCount);
16848 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
16850 const uint32_t heapIndex = firstHeap + i;
16852 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
16856 outBudget->
budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
16861 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
16863 VkResult VmaAllocator_T::DefragmentationBegin(
16873 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
16874 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
16877 (*pContext)->AddAllocations(
16880 VkResult res = (*pContext)->Defragment(
16885 if(res != VK_NOT_READY)
16887 vma_delete(
this, *pContext);
16888 *pContext = VMA_NULL;
16894 VkResult VmaAllocator_T::DefragmentationEnd(
16897 vma_delete(
this, context);
16901 VkResult VmaAllocator_T::DefragmentationPassBegin(
16905 return context->DefragmentPassBegin(pInfo);
16907 VkResult VmaAllocator_T::DefragmentationPassEnd(
16910 return context->DefragmentPassEnd();
16916 if(hAllocation->CanBecomeLost())
16922 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
16923 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
16926 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
16930 pAllocationInfo->
offset = 0;
16931 pAllocationInfo->
size = hAllocation->GetSize();
16933 pAllocationInfo->
pUserData = hAllocation->GetUserData();
16936 else if(localLastUseFrameIndex == localCurrFrameIndex)
16938 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
16939 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
16940 pAllocationInfo->
offset = hAllocation->GetOffset();
16941 pAllocationInfo->
size = hAllocation->GetSize();
16943 pAllocationInfo->
pUserData = hAllocation->GetUserData();
16948 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
16950 localLastUseFrameIndex = localCurrFrameIndex;
16957 #if VMA_STATS_STRING_ENABLED
16958 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
16959 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
16962 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
16963 if(localLastUseFrameIndex == localCurrFrameIndex)
16969 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
16971 localLastUseFrameIndex = localCurrFrameIndex;
16977 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
16978 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
16979 pAllocationInfo->
offset = hAllocation->GetOffset();
16980 pAllocationInfo->
size = hAllocation->GetSize();
16981 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
16982 pAllocationInfo->
pUserData = hAllocation->GetUserData();
16986 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
16989 if(hAllocation->CanBecomeLost())
16991 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
16992 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
16995 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
16999 else if(localLastUseFrameIndex == localCurrFrameIndex)
17005 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17007 localLastUseFrameIndex = localCurrFrameIndex;
17014 #if VMA_STATS_STRING_ENABLED
17015 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17016 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17019 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
17020 if(localLastUseFrameIndex == localCurrFrameIndex)
17026 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17028 localLastUseFrameIndex = localCurrFrameIndex;
17040 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
17050 return VK_ERROR_INITIALIZATION_FAILED;
17054 ((1u << pCreateInfo->
memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
17056 return VK_ERROR_FEATURE_NOT_PRESENT;
17059 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
17061 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
17063 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
17064 if(res != VK_SUCCESS)
17066 vma_delete(
this, *pPool);
17073 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
17074 (*pPool)->SetId(m_NextPoolId++);
17075 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
17081 void VmaAllocator_T::DestroyPool(
VmaPool pool)
17085 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
17086 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
17087 VMA_ASSERT(success &&
"Pool not found in Allocator.");
17090 vma_delete(
this, pool);
17095 pool->m_BlockVector.GetPoolStats(pPoolStats);
17098 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
17100 m_CurrentFrameIndex.store(frameIndex);
17102 #if VMA_MEMORY_BUDGET
17103 if(m_UseExtMemoryBudget)
17105 UpdateVulkanBudget();
17107 #endif // #if VMA_MEMORY_BUDGET
17110 void VmaAllocator_T::MakePoolAllocationsLost(
17112 size_t* pLostAllocationCount)
17114 hPool->m_BlockVector.MakePoolAllocationsLost(
17115 m_CurrentFrameIndex.load(),
17116 pLostAllocationCount);
17119 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
17121 return hPool->m_BlockVector.CheckCorruption();
17124 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
17126 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
17129 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17131 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
17133 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
17134 VMA_ASSERT(pBlockVector);
17135 VkResult localRes = pBlockVector->CheckCorruption();
17138 case VK_ERROR_FEATURE_NOT_PRESENT:
17141 finalRes = VK_SUCCESS;
17151 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
17152 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
17154 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
17156 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
17159 case VK_ERROR_FEATURE_NOT_PRESENT:
17162 finalRes = VK_SUCCESS;
17174 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
17176 *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST,
false);
17177 (*pAllocation)->InitLost();
17180 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
17182 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
17185 if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
17187 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
17188 VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
17191 const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
17192 if(blockBytesAfterAllocation > heapSize)
17194 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
17196 if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
17204 m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
17208 VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
17210 if(res == VK_SUCCESS)
17212 #if VMA_MEMORY_BUDGET
17213 ++m_Budget.m_OperationsSinceBudgetFetch;
17217 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
17219 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.
pUserData);
17224 m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
17230 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
17233 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
17235 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.
pUserData);
17239 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
17241 m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
17244 VkResult VmaAllocator_T::BindVulkanBuffer(
17245 VkDeviceMemory memory,
17246 VkDeviceSize memoryOffset,
17250 if(pNext != VMA_NULL)
17252 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17253 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17254 m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
17256 VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
17257 bindBufferMemoryInfo.pNext = pNext;
17258 bindBufferMemoryInfo.buffer = buffer;
17259 bindBufferMemoryInfo.memory = memory;
17260 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17261 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
17264 #endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17266 return VK_ERROR_EXTENSION_NOT_PRESENT;
17271 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
17275 VkResult VmaAllocator_T::BindVulkanImage(
17276 VkDeviceMemory memory,
17277 VkDeviceSize memoryOffset,
17281 if(pNext != VMA_NULL)
17283 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17284 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17285 m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
17287 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
17288 bindBufferMemoryInfo.pNext = pNext;
17289 bindBufferMemoryInfo.image = image;
17290 bindBufferMemoryInfo.memory = memory;
17291 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17292 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
17295 #endif // #if VMA_BIND_MEMORY2
17297 return VK_ERROR_EXTENSION_NOT_PRESENT;
17302 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
17306 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
17308 if(hAllocation->CanBecomeLost())
17310 return VK_ERROR_MEMORY_MAP_FAILED;
17313 switch(hAllocation->GetType())
17315 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17317 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17318 char *pBytes = VMA_NULL;
17319 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
17320 if(res == VK_SUCCESS)
17322 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
17323 hAllocation->BlockAllocMap();
17327 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17328 return hAllocation->DedicatedAllocMap(
this, ppData);
17331 return VK_ERROR_MEMORY_MAP_FAILED;
17337 switch(hAllocation->GetType())
17339 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17341 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17342 hAllocation->BlockAllocUnmap();
17343 pBlock->Unmap(
this, 1);
17346 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17347 hAllocation->DedicatedAllocUnmap(
this);
17354 VkResult VmaAllocator_T::BindBufferMemory(
17356 VkDeviceSize allocationLocalOffset,
17360 VkResult res = VK_SUCCESS;
17361 switch(hAllocation->GetType())
17363 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17364 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
17366 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17368 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17369 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
17370 res = pBlock->BindBufferMemory(
this, hAllocation, allocationLocalOffset, hBuffer, pNext);
17379 VkResult VmaAllocator_T::BindImageMemory(
17381 VkDeviceSize allocationLocalOffset,
17385 VkResult res = VK_SUCCESS;
17386 switch(hAllocation->GetType())
17388 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17389 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
17391 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17393 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
17394 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
17395 res = pBlock->BindImageMemory(
this, hAllocation, allocationLocalOffset, hImage, pNext);
17404 VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
17406 VkDeviceSize offset, VkDeviceSize size,
17407 VMA_CACHE_OPERATION op)
17409 VkResult res = VK_SUCCESS;
17411 VkMappedMemoryRange memRange = {};
17412 if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
17416 case VMA_CACHE_FLUSH:
17417 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
17419 case VMA_CACHE_INVALIDATE:
17420 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
17430 VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
17431 uint32_t allocationCount,
17433 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
17434 VMA_CACHE_OPERATION op)
17436 typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
17437 typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
17438 RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
17440 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
17443 const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
17444 const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
17445 VkMappedMemoryRange newRange;
17446 if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
17448 ranges.push_back(newRange);
17452 VkResult res = VK_SUCCESS;
17453 if(!ranges.empty())
17457 case VMA_CACHE_FLUSH:
17458 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
17460 case VMA_CACHE_INVALIDATE:
17461 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
17471 void VmaAllocator_T::FreeDedicatedMemory(
const VmaAllocation allocation)
17473 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
17475 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17477 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17478 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
17479 VMA_ASSERT(pDedicatedAllocations);
17480 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
17481 VMA_ASSERT(success);
17484 VkDeviceMemory hMemory = allocation->GetMemory();
17496 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
17498 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
17501 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const
17503 VkBufferCreateInfo dummyBufCreateInfo;
17504 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
17506 uint32_t memoryTypeBits = 0;
17509 VkBuffer buf = VK_NULL_HANDLE;
17510 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
17511 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
17512 if(res == VK_SUCCESS)
17515 VkMemoryRequirements memReq;
17516 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
17517 memoryTypeBits = memReq.memoryTypeBits;
17520 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
17523 return memoryTypeBits;
17526 uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits()
const
17529 VMA_ASSERT(GetMemoryTypeCount() > 0);
17531 uint32_t memoryTypeBits = UINT32_MAX;
17533 if(!m_UseAmdDeviceCoherentMemory)
17536 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17538 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
17540 memoryTypeBits &= ~(1u << memTypeIndex);
17545 return memoryTypeBits;
17548 bool VmaAllocator_T::GetFlushOrInvalidateRange(
17550 VkDeviceSize offset, VkDeviceSize size,
17551 VkMappedMemoryRange& outRange)
const
17553 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17554 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
17556 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
17557 const VkDeviceSize allocationSize = allocation->GetSize();
17558 VMA_ASSERT(offset <= allocationSize);
17560 outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
17561 outRange.pNext = VMA_NULL;
17562 outRange.memory = allocation->GetMemory();
17564 switch(allocation->GetType())
17566 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17567 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
17568 if(size == VK_WHOLE_SIZE)
17570 outRange.size = allocationSize - outRange.offset;
17574 VMA_ASSERT(offset + size <= allocationSize);
17575 outRange.size = VMA_MIN(
17576 VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
17577 allocationSize - outRange.offset);
17580 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17583 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
17584 if(size == VK_WHOLE_SIZE)
17586 size = allocationSize - offset;
17590 VMA_ASSERT(offset + size <= allocationSize);
17592 outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);
17595 const VkDeviceSize allocationOffset = allocation->GetOffset();
17596 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
17597 const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
17598 outRange.offset += allocationOffset;
17599 outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
#if VMA_MEMORY_BUDGET

// Fetches fresh per-heap usage/budget numbers from the driver via
// VK_EXT_memory_budget and caches them (under write lock), sanitizing
// obviously bogus driver values.
void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    VmaPnextChainPushFront(&memProps, &budgetProps);

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();

            // Some buggy drivers return zero or absurdly large budgets —
            // fall back to sane values derived from the heap size.
            if(m_Budget.m_VulkanBudget[heapIndex] == 0)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics.
            }
            else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
            }
            if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
            {
                m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
            }
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET
17653 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
17655 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
17656 !hAllocation->CanBecomeLost() &&
17657 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
17659 void* pData = VMA_NULL;
17660 VkResult res = Map(hAllocation, &pData);
17661 if(res == VK_SUCCESS)
17663 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
17664 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
17665 Unmap(hAllocation);
17669 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
17674 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
17676 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
17677 if(memoryTypeBits == UINT32_MAX)
17679 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
17680 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
17682 return memoryTypeBits;
#if VMA_STATS_STRING_ENABLED

// Writes the detailed JSON map of dedicated allocations, default pools, and
// custom pools into the given JSON writer. Section headers are emitted
// lazily so empty sections do not appear in the output.
// NOTE(review): source extract was garbled; body reconstructed — verify against upstream vk_mem_alloc.h.
void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
17782 VMA_ASSERT(pCreateInfo && pAllocator);
17785 VMA_DEBUG_LOG(
"vmaCreateAllocator");
17787 return (*pAllocator)->Init(pCreateInfo);
17793 if(allocator != VK_NULL_HANDLE)
17795 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
17796 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
17797 vma_delete(&allocationCallbacks, allocator);
17803 VMA_ASSERT(allocator && pAllocatorInfo);
17804 pAllocatorInfo->
instance = allocator->m_hInstance;
17805 pAllocatorInfo->
physicalDevice = allocator->GetPhysicalDevice();
17806 pAllocatorInfo->
device = allocator->m_hDevice;
17811 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
17813 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
17814 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
17819 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
17821 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
17822 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
17827 uint32_t memoryTypeIndex,
17828 VkMemoryPropertyFlags* pFlags)
17830 VMA_ASSERT(allocator && pFlags);
17831 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
17832 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
17837 uint32_t frameIndex)
17839 VMA_ASSERT(allocator);
17840 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
17842 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17844 allocator->SetCurrentFrameIndex(frameIndex);
17851 VMA_ASSERT(allocator && pStats);
17852 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17853 allocator->CalculateStats(pStats);
17860 VMA_ASSERT(allocator && pBudget);
17861 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17862 allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
17865 #if VMA_STATS_STRING_ENABLED
17869 char** ppStatsString,
17870 VkBool32 detailedMap)
17872 VMA_ASSERT(allocator && ppStatsString);
17873 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17875 VmaStringBuilder sb(allocator);
17877 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
17878 json.BeginObject();
17881 allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());
17884 allocator->CalculateStats(&stats);
17886 json.WriteString(
"Total");
17887 VmaPrintStatInfo(json, stats.
total);
17889 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
17891 json.BeginString(
"Heap ");
17892 json.ContinueString(heapIndex);
17894 json.BeginObject();
17896 json.WriteString(
"Size");
17897 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
17899 json.WriteString(
"Flags");
17900 json.BeginArray(
true);
17901 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
17903 json.WriteString(
"DEVICE_LOCAL");
// --- Fragment: tail of vmaBuildStatsString + vmaFreeStatsString ---
// NOTE(review): corrupted extraction — the upstream file's line numbers
// (17907, 17908, ...) are fused onto the code, statements are wrapped
// mid-expression, and gaps in the fused numbering show that lines (braces,
// json.EndObject()/EndArray() calls, function signatures) were dropped.
// Restore this region from upstream VulkanMemoryAllocator before compiling.

// Per-heap "Budget" JSON object: byte counters tracked by VMA plus the
// usage/budget figures held in the `budget` array for this heap.
17907 json.WriteString(
"Budget");
17908 json.BeginObject();
17910 json.WriteString(
"BlockBytes");
17911 json.WriteNumber(budget[heapIndex].blockBytes);
17912 json.WriteString(
"AllocationBytes");
17913 json.WriteNumber(budget[heapIndex].allocationBytes);
17914 json.WriteString(
"Usage");
17915 json.WriteNumber(budget[heapIndex].usage);
17916 json.WriteString(
"Budget");
17917 json.WriteNumber(budget[heapIndex].budget);
// Per-heap statistics object.
17923 json.WriteString(
"Stats");
17924 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Enumerate memory types belonging to this heap and dump each one.
17927 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
17929 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
17931 json.BeginString(
"Type ");
17932 json.ContinueString(typeIndex);
17935 json.BeginObject();
// "Flags" array: one string per VkMemoryPropertyFlags bit set on the type.
17937 json.WriteString(
"Flags");
17938 json.BeginArray(
true);
17939 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
17940 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
17942 json.WriteString(
"DEVICE_LOCAL");
17944 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
17946 json.WriteString(
"HOST_VISIBLE");
17948 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
17950 json.WriteString(
"HOST_COHERENT");
17952 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
17954 json.WriteString(
"HOST_CACHED");
17956 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
17958 json.WriteString(
"LAZILY_ALLOCATED");
17960 if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
17962 json.WriteString(
// NOTE(review): leading space in the string below matches upstream; it is
// cosmetic only (these are free-form JSON array strings).
" PROTECTED");
17964 if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
17966 json.WriteString(
" DEVICE_COHERENT");
17968 if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
17970 json.WriteString(
" DEVICE_UNCACHED");
// Per-type statistics object.
17976 json.WriteString(
"Stats");
17977 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional detailed map of all blocks/allocations when requested by caller.
17986 if(detailedMap == VK_TRUE)
17988 allocator->PrintDetailedMap(json);
// Copy the accumulated string-builder contents into a NUL-terminated buffer
// allocated through the allocator's CPU allocation callbacks; ownership is
// transferred to the caller via *ppStatsString (freed by vmaFreeStatsString).
17994 const size_t len = sb.GetLength();
17995 char*
const pChars = vma_new_array(allocator,
char, len + 1);
17998 memcpy(pChars, sb.GetData(), len);
18000 pChars[len] =
'\0';
18001 *ppStatsString = pChars;
// vmaFreeStatsString: releases a string produced above. The signature line is
// missing from this extraction; `char* pStatsString)` is its last parameter.
18006 char* pStatsString)
18008 if(pStatsString != VMA_NULL)
18010 VMA_ASSERT(allocator);
// Length must match the len + 1 used at allocation time above.
18011 size_t len = strlen(pStatsString);
18012 vma_delete_array(allocator, pStatsString, len + 1);
18016 #endif // #if VMA_STATS_STRING_ENABLED
// --- Fragment: vmaFindMemoryTypeIndex ---
// NOTE(review): corrupted extraction — the function signature's first lines
// and the switch's `case VMA_MEMORY_USAGE_*` labels are missing (gaps in the
// fused upstream line numbers). Restore from upstream before compiling.
// Purpose (visible logic): translate a VmaAllocationCreateInfo usage/flags
// request into a concrete memory type index by scoring every candidate type.
18023 uint32_t memoryTypeBits,
18025 uint32_t* pMemoryTypeIndex)
18027 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18028 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18029 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
// Mask out memory types globally excluded by the allocator.
18031 memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();
18038 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
18039 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
18040 uint32_t notPreferredFlags = 0;
// Convert the high-level usage enum into required/preferred/not-preferred
// property flags. Case labels are missing here; bodies appear in upstream
// order (GPU_ONLY, CPU_ONLY, CPU_TO_GPU, GPU_TO_CPU, CPU_COPY,
// GPU_LAZILY_ALLOCATED) — TODO confirm against upstream when restoring.
18043 switch(pAllocationCreateInfo->
usage)
18048 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18050 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18054 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
18057 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18058 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18060 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18064 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18065 preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
18068 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18071 requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
// Avoid AMD device-coherent/uncached types unless explicitly requested
// (the opening `if(...` line of this condition is missing).
18080 (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
18082 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
// Scan all memory types allowed by memoryTypeBits; pick the one with the
// lowest cost = (missing preferred bits) + (present not-preferred bits).
18085 *pMemoryTypeIndex = UINT32_MAX;
18086 uint32_t minCost = UINT32_MAX;
18087 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
18088 memTypeIndex < allocator->GetMemoryTypeCount();
18089 ++memTypeIndex, memTypeBit <<= 1)
18092 if((memTypeBit & memoryTypeBits) != 0)
18094 const VkMemoryPropertyFlags currFlags =
18095 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
// All required flags must be present in the candidate type.
18097 if((requiredFlags & ~currFlags) == 0)
18100 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
18101 VmaCountBitsSet(currFlags & notPreferredFlags);
18103 if(currCost < minCost)
18105 *pMemoryTypeIndex = memTypeIndex;
// (An early-exit `if(currCost == 0) return VK_SUCCESS;` likely sat in the
// dropped lines between 18105 and 18110 — TODO confirm on restore.)
18110 minCost = currCost;
// UINT32_MAX sentinel means no type satisfied the required flags.
18115 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
// --- Fragments: vmaFindMemoryTypeIndexForBufferInfo /
// vmaFindMemoryTypeIndexForImageInfo ---
// NOTE(review): corrupted extraction — both signatures are truncated and the
// inner vmaFindMemoryTypeIndex(...) call lines are partially missing.
// Pattern (visible): create a temporary buffer/image, query its memory
// requirements, resolve a type index, then destroy the temporary object.
18120 const VkBufferCreateInfo* pBufferCreateInfo,
18122 uint32_t* pMemoryTypeIndex)
18124 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18125 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
18126 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18127 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
// Temporary buffer exists only to obtain VkMemoryRequirements.
18129 const VkDevice hDev = allocator->m_hDevice;
18130 VkBuffer hBuffer = VK_NULL_HANDLE;
18131 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
18132 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
18133 if(res == VK_SUCCESS)
18135 VkMemoryRequirements memReq = {};
18136 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
18137 hDev, hBuffer, &memReq);
// Dangling argument lines of a dropped vmaFindMemoryTypeIndex(...) call.
18141 memReq.memoryTypeBits,
18142 pAllocationCreateInfo,
// Temporary buffer is always destroyed before returning.
18145 allocator->GetVulkanFunctions().vkDestroyBuffer(
18146 hDev, hBuffer, allocator->GetAllocationCallbacks());
// vmaFindMemoryTypeIndexForImageInfo: same pattern with a temporary image.
18153 const VkImageCreateInfo* pImageCreateInfo,
18155 uint32_t* pMemoryTypeIndex)
18157 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18158 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
18159 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18160 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18162 const VkDevice hDev = allocator->m_hDevice;
18163 VkImage hImage = VK_NULL_HANDLE;
18164 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
18165 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
18166 if(res == VK_SUCCESS)
18168 VkMemoryRequirements memReq = {};
18169 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
18170 hDev, hImage, &memReq);
18174 memReq.memoryTypeBits,
18175 pAllocationCreateInfo,
18178 allocator->GetVulkanFunctions().vkDestroyImage(
18179 hDev, hImage, allocator->GetAllocationCallbacks());
// --- Fragments: custom-pool API (vmaCreatePool, vmaDestroyPool,
// vmaGetPoolStats, vmaMakePoolAllocationsLost, vmaCheckPoolCorruption,
// vmaGetPoolName, vmaSetPoolName) ---
// NOTE(review): corrupted extraction — every signature and most closing
// braces/#endif lines are missing. Shared pattern (visible): assert args,
// VMA_DEBUG_LOG, take the global debug mutex, delegate to the allocator,
// and mirror the call into the recorder when VMA_RECORDING_ENABLED.
// vmaCreatePool: delegates to VmaAllocator_T::CreatePool.
18189 VMA_ASSERT(allocator && pCreateInfo && pPool);
18191 VMA_DEBUG_LOG(
"vmaCreatePool");
18193 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18195 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
18197 #if VMA_RECORDING_ENABLED
18198 if(allocator->GetRecorder() != VMA_NULL)
18200 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
// vmaDestroyPool: no-op for a null pool handle.
18211 VMA_ASSERT(allocator);
18213 if(pool == VK_NULL_HANDLE)
18218 VMA_DEBUG_LOG(
"vmaDestroyPool");
18220 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18222 #if VMA_RECORDING_ENABLED
18223 if(allocator->GetRecorder() != VMA_NULL)
18225 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
18229 allocator->DestroyPool(pool);
// vmaGetPoolStats: fills *pPoolStats for the given pool.
18237 VMA_ASSERT(allocator && pool && pPoolStats);
18239 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18241 allocator->GetPoolStats(pool, pPoolStats);
// vmaMakePoolAllocationsLost: marks stale allocations lost; the count of
// allocations affected is returned through pLostAllocationCount.
18247 size_t* pLostAllocationCount)
18249 VMA_ASSERT(allocator && pool);
18251 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18253 #if VMA_RECORDING_ENABLED
18254 if(allocator->GetRecorder() != VMA_NULL)
18256 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
18260 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
// vmaCheckPoolCorruption: validates margin bytes in the pool's blocks.
18265 VMA_ASSERT(allocator && pool);
18267 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18269 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
18271 return allocator->CheckPoolCorruption(pool);
// vmaGetPoolName: returns the pool's name pointer (not a copy).
18277 const char** ppName)
18279 VMA_ASSERT(allocator && pool && ppName);
18281 VMA_DEBUG_LOG(
"vmaGetPoolName");
18283 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18285 *ppName = pool->GetName();
// vmaSetPoolName: stores the name on the pool and records the call.
18293 VMA_ASSERT(allocator && pool);
18295 VMA_DEBUG_LOG(
"vmaSetPoolName");
18297 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18299 pool->SetName(pName);
18301 #if VMA_RECORDING_ENABLED
18302 if(allocator->GetRecorder() != VMA_NULL)
18304 allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
// --- Fragments: vmaAllocateMemory, vmaAllocateMemoryPages,
// vmaAllocateMemoryForBuffer, vmaAllocateMemoryForImage ---
// NOTE(review): corrupted extraction — signatures truncated and many
// argument lines of AllocateMemory(...) calls dropped (visible as gaps in
// the fused upstream line numbers). Restore from upstream before compiling.
// vmaAllocateMemory: generic allocation from explicit VkMemoryRequirements.
18311 const VkMemoryRequirements* pVkMemoryRequirements,
18316 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
18318 VMA_DEBUG_LOG(
"vmaAllocateMemory");
18320 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18322 VkResult result = allocator->AllocateMemory(
18323 *pVkMemoryRequirements,
// (Several argument lines dropped here.)
18330 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18334 #if VMA_RECORDING_ENABLED
18335 if(allocator->GetRecorder() != VMA_NULL)
18337 allocator->GetRecorder()->RecordAllocateMemory(
18338 allocator->GetCurrentFrameIndex(),
18339 *pVkMemoryRequirements,
// Optionally report allocation info back to the caller on success.
18345 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18347 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaAllocateMemoryPages: batch variant; allocationCount == 0 is a no-op.
18355 const VkMemoryRequirements* pVkMemoryRequirements,
18357 size_t allocationCount,
18361 if(allocationCount == 0)
18366 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
18368 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
18370 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18372 VkResult result = allocator->AllocateMemory(
18373 *pVkMemoryRequirements,
18380 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18384 #if VMA_RECORDING_ENABLED
18385 if(allocator->GetRecorder() != VMA_NULL)
18387 allocator->GetRecorder()->RecordAllocateMemoryPages(
18388 allocator->GetCurrentFrameIndex(),
18389 *pVkMemoryRequirements,
18391 (uint64_t)allocationCount,
// Fill one VmaAllocationInfo per successful allocation.
18396 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18398 for(
size_t i = 0; i < allocationCount; ++i)
18400 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
// vmaAllocateMemoryForBuffer: queries the buffer's requirements (including
// dedicated-allocation preference) and allocates accordingly.
18414 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18416 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
18418 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18420 VkMemoryRequirements vkMemReq = {};
18421 bool requiresDedicatedAllocation =
false;
18422 bool prefersDedicatedAllocation =
false;
18423 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
18424 requiresDedicatedAllocation,
18425 prefersDedicatedAllocation);
18427 VkResult result = allocator->AllocateMemory(
18429 requiresDedicatedAllocation,
18430 prefersDedicatedAllocation,
18435 VMA_SUBALLOCATION_TYPE_BUFFER,
18439 #if VMA_RECORDING_ENABLED
18440 if(allocator->GetRecorder() != VMA_NULL)
18442 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
18443 allocator->GetCurrentFrameIndex(),
18445 requiresDedicatedAllocation,
18446 prefersDedicatedAllocation,
18452 if(pAllocationInfo && result == VK_SUCCESS)
18454 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaAllocateMemoryForImage: same pattern for an existing VkImage.
18467 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18469 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
18471 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18473 VkMemoryRequirements vkMemReq = {};
18474 bool requiresDedicatedAllocation =
false;
18475 bool prefersDedicatedAllocation =
false;
18476 allocator->GetImageMemoryRequirements(image, vkMemReq,
18477 requiresDedicatedAllocation, prefersDedicatedAllocation);
18479 VkResult result = allocator->AllocateMemory(
18481 requiresDedicatedAllocation,
18482 prefersDedicatedAllocation,
18487 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
18491 #if VMA_RECORDING_ENABLED
18492 if(allocator->GetRecorder() != VMA_NULL)
18494 allocator->GetRecorder()->RecordAllocateMemoryForImage(
18495 allocator->GetCurrentFrameIndex(),
18497 requiresDedicatedAllocation,
18498 prefersDedicatedAllocation,
18504 if(pAllocationInfo && result == VK_SUCCESS)
18506 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// --- Fragments: vmaFreeMemory, vmaFreeMemoryPages, vmaResizeAllocation ---
// NOTE(review): corrupted extraction — signatures and some argument lines
// are missing. Restore from upstream before compiling.
// vmaFreeMemory: null allocation is a no-op.
18516 VMA_ASSERT(allocator);
18518 if(allocation == VK_NULL_HANDLE)
18523 VMA_DEBUG_LOG(
"vmaFreeMemory");
18525 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18527 #if VMA_RECORDING_ENABLED
18528 if(allocator->GetRecorder() != VMA_NULL)
18530 allocator->GetRecorder()->RecordFreeMemory(
18531 allocator->GetCurrentFrameIndex(),
// FreeMemory call's argument lines (count + &allocation) were dropped.
18536 allocator->FreeMemory(
// vmaFreeMemoryPages: batch free; zero count is a no-op.
18543 size_t allocationCount,
18546 if(allocationCount == 0)
18551 VMA_ASSERT(allocator);
18553 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
18555 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18557 #if VMA_RECORDING_ENABLED
18558 if(allocator->GetRecorder() != VMA_NULL)
18560 allocator->GetRecorder()->RecordFreeMemoryPages(
18561 allocator->GetCurrentFrameIndex(),
18562 (uint64_t)allocationCount,
18567 allocator->FreeMemory(allocationCount, pAllocations);
// vmaResizeAllocation: thin wrapper over VmaAllocator_T::ResizeAllocation
// (deprecated in upstream VMA — TODO confirm intended retention).
18573 VkDeviceSize newSize)
18575 VMA_ASSERT(allocator && allocation);
18577 VMA_DEBUG_LOG(
"vmaResizeAllocation");
18579 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18581 return allocator->ResizeAllocation(allocation, newSize);
// --- Fragments: vmaGetAllocationInfo, vmaTouchAllocation,
// vmaSetAllocationUserData, vmaCreateLostAllocation ---
// NOTE(review): corrupted extraction — signatures and trailing argument
// lines of the recorder calls are missing. Restore from upstream.
// vmaGetAllocationInfo: copies the allocation's current state to caller.
18589 VMA_ASSERT(allocator && allocation && pAllocationInfo);
18591 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18593 #if VMA_RECORDING_ENABLED
18594 if(allocator->GetRecorder() != VMA_NULL)
18596 allocator->GetRecorder()->RecordGetAllocationInfo(
18597 allocator->GetCurrentFrameIndex(),
18602 allocator->GetAllocationInfo(allocation, pAllocationInfo);
// vmaTouchAllocation: returns whether the allocation is still valid and
// updates its last-use frame index via the allocator.
18609 VMA_ASSERT(allocator && allocation);
18611 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18613 #if VMA_RECORDING_ENABLED
18614 if(allocator->GetRecorder() != VMA_NULL)
18616 allocator->GetRecorder()->RecordTouchAllocation(
18617 allocator->GetCurrentFrameIndex(),
18622 return allocator->TouchAllocation(allocation);
// vmaSetAllocationUserData: stores an opaque user pointer on the allocation.
18630 VMA_ASSERT(allocator && allocation);
18632 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18634 allocation->SetUserData(allocator, pUserData);
18636 #if VMA_RECORDING_ENABLED
18637 if(allocator->GetRecorder() != VMA_NULL)
18639 allocator->GetRecorder()->RecordSetAllocationUserData(
18640 allocator->GetCurrentFrameIndex(),
// vmaCreateLostAllocation: produces a pre-lost placeholder allocation.
// (Note the stray `;` after the mutex macro here — present in the original.)
18651 VMA_ASSERT(allocator && pAllocation);
18653 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
18655 allocator->CreateLostAllocation(pAllocation);
18657 #if VMA_RECORDING_ENABLED
18658 if(allocator->GetRecorder() != VMA_NULL)
18660 allocator->GetRecorder()->RecordCreateLostAllocation(
18661 allocator->GetCurrentFrameIndex(),
// --- Fragments: vmaMapMemory, vmaUnmapMemory, vmaFlushAllocation,
// vmaInvalidateAllocation ---
// NOTE(review): corrupted extraction — signatures and `return res;` /
// closing lines are missing. Restore from upstream before compiling.
// vmaMapMemory: maps (or ref-counts an existing mapping of) the allocation.
18672 VMA_ASSERT(allocator && allocation && ppData);
18674 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18676 VkResult res = allocator->Map(allocation, ppData);
18678 #if VMA_RECORDING_ENABLED
18679 if(allocator->GetRecorder() != VMA_NULL)
18681 allocator->GetRecorder()->RecordMapMemory(
18682 allocator->GetCurrentFrameIndex(),
// vmaUnmapMemory: decrements the map ref-count / unmaps.
18694 VMA_ASSERT(allocator && allocation);
18696 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18698 #if VMA_RECORDING_ENABLED
18699 if(allocator->GetRecorder() != VMA_NULL)
18701 allocator->GetRecorder()->RecordUnmapMemory(
18702 allocator->GetCurrentFrameIndex(),
18707 allocator->Unmap(allocation);
// vmaFlushAllocation: host->device cache maintenance for a mapped range.
18712 VMA_ASSERT(allocator && allocation);
18714 VMA_DEBUG_LOG(
"vmaFlushAllocation");
18716 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18718 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
18720 #if VMA_RECORDING_ENABLED
18721 if(allocator->GetRecorder() != VMA_NULL)
18723 allocator->GetRecorder()->RecordFlushAllocation(
18724 allocator->GetCurrentFrameIndex(),
18725 allocation, offset, size);
// vmaInvalidateAllocation: device->host direction of the same operation.
18734 VMA_ASSERT(allocator && allocation);
18736 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
18738 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18740 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
18742 #if VMA_RECORDING_ENABLED
18743 if(allocator->GetRecorder() != VMA_NULL)
18745 allocator->GetRecorder()->RecordInvalidateAllocation(
18746 allocator->GetCurrentFrameIndex(),
18747 allocation, offset, size);
// --- Fragments: vmaFlushAllocations, vmaInvalidateAllocations,
// vmaCheckCorruption ---
// NOTE(review): corrupted extraction — signatures, recorder bodies, and
// `return res;` lines are missing. Restore from upstream before compiling.
// vmaFlushAllocations: batched flush; zero count is a no-op.
18756 uint32_t allocationCount,
18758 const VkDeviceSize* offsets,
18759 const VkDeviceSize* sizes)
18761 VMA_ASSERT(allocator);
18763 if(allocationCount == 0)
18768 VMA_ASSERT(allocations);
18770 VMA_DEBUG_LOG(
"vmaFlushAllocations");
18772 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18774 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);
18776 #if VMA_RECORDING_ENABLED
18777 if(allocator->GetRecorder() != VMA_NULL)
// vmaInvalidateAllocations: batched invalidate, same shape as above.
18788 uint32_t allocationCount,
18790 const VkDeviceSize* offsets,
18791 const VkDeviceSize* sizes)
18793 VMA_ASSERT(allocator);
18795 if(allocationCount == 0)
18800 VMA_ASSERT(allocations);
18802 VMA_DEBUG_LOG(
"vmaInvalidateAllocations");
18804 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18806 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);
18808 #if VMA_RECORDING_ENABLED
18809 if(allocator->GetRecorder() != VMA_NULL)
// vmaCheckCorruption: validates margins across all memory types selected by
// memoryTypeBits; delegates entirely to the allocator.
18820 VMA_ASSERT(allocator);
18822 VMA_DEBUG_LOG(
"vmaCheckCorruption");
18824 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18826 return allocator->CheckCorruption(memoryTypeBits);
// --- Fragments: deprecated vmaDefragment, vmaDefragmentationBegin,
// vmaDefragmentationEnd, vmaBeginDefragmentationPass,
// vmaEndDefragmentationPass ---
// NOTE(review): corrupted extraction — vmaDefragment in particular is almost
// entirely missing (only four scattered lines survive). Restore from
// upstream before compiling.
// vmaDefragment (legacy API): surviving parameter and body lines only.
18832 size_t allocationCount,
18833 VkBool32* pAllocationsChanged,
18843 if(pDefragmentationInfo != VMA_NULL)
18857 if(res == VK_NOT_READY)
// vmaDefragmentationBegin: validates the pool array then starts a
// defragmentation context; result handed back via *pContext.
18870 VMA_ASSERT(allocator && pInfo && pContext);
18881 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
18883 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
18885 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18887 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
18889 #if VMA_RECORDING_ENABLED
18890 if(allocator->GetRecorder() != VMA_NULL)
18892 allocator->GetRecorder()->RecordDefragmentationBegin(
18893 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
// vmaDefragmentationEnd: null context is accepted (returns in the dropped
// else-branch); otherwise records and finishes the context.
18904 VMA_ASSERT(allocator);
18906 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
18908 if(context != VK_NULL_HANDLE)
18910 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18912 #if VMA_RECORDING_ENABLED
18913 if(allocator->GetRecorder() != VMA_NULL)
18915 allocator->GetRecorder()->RecordDefragmentationEnd(
18916 allocator->GetCurrentFrameIndex(), context);
18920 return allocator->DefragmentationEnd(context);
// vmaBeginDefragmentationPass: null context short-circuits (dropped branch);
// otherwise starts one incremental pass.
18934 VMA_ASSERT(allocator);
18937 VMA_DEBUG_LOG(
"vmaBeginDefragmentationPass");
18939 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18941 if(context == VK_NULL_HANDLE)
18947 return allocator->DefragmentationPassBegin(pInfo, context);
// vmaEndDefragmentationPass: symmetric to the Begin call above.
18953 VMA_ASSERT(allocator);
18955 VMA_DEBUG_LOG(
"vmaEndDefragmentationPass");
18956 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18958 if(context == VK_NULL_HANDLE)
18961 return allocator->DefragmentationPassEnd(context);
// --- Fragments: vmaBindBufferMemory(2), vmaBindImageMemory(2) ---
// NOTE(review): corrupted extraction — signatures truncated. The plain
// variants forward with offset 0 and null pNext; the `2` variants pass the
// caller's allocationLocalOffset and pNext chain through.
// vmaBindBufferMemory.
18969 VMA_ASSERT(allocator && allocation && buffer);
18971 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
18973 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18975 return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
// vmaBindBufferMemory2.
18981 VkDeviceSize allocationLocalOffset,
18985 VMA_ASSERT(allocator && allocation && buffer);
18987 VMA_DEBUG_LOG(
"vmaBindBufferMemory2");
18989 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18991 return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
// vmaBindImageMemory.
18999 VMA_ASSERT(allocator && allocation && image);
19001 VMA_DEBUG_LOG(
"vmaBindImageMemory");
19003 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19005 return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
// vmaBindImageMemory2.
19011 VkDeviceSize allocationLocalOffset,
19015 VMA_ASSERT(allocator && allocation && image);
19017 VMA_DEBUG_LOG(
"vmaBindImageMemory2");
19019 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19021 return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
// --- Fragment: vmaCreateBuffer ---
// NOTE(review): corrupted extraction — signature, several AllocateMemory
// argument lines, and the success/failure branch structure (if(res >= 0),
// braces, return statements) are missing. Restore from upstream.
// Flow (visible): validate inputs -> vkCreateBuffer -> query requirements ->
// AllocateMemory -> BindBufferMemory -> fill info; on any failure, unwind
// whatever was created (free allocation, destroy buffer, null the outputs).
19026 const VkBufferCreateInfo* pBufferCreateInfo,
19032 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
// Zero-size buffers are rejected up front.
19034 if(pBufferCreateInfo->size == 0)
19036 return VK_ERROR_VALIDATION_FAILED_EXT;
// Device-address usage requires the allocator to have been created with
// buffer-device-address support enabled.
19038 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
19039 !allocator->m_UseKhrBufferDeviceAddress)
19041 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
19042 return VK_ERROR_VALIDATION_FAILED_EXT;
19045 VMA_DEBUG_LOG(
"vmaCreateBuffer");
19047 VMA_DEBUG_GLOBAL_MUTEX_LOCK
// Outputs start as null so failure paths leave them in a defined state.
19049 *pBuffer = VK_NULL_HANDLE;
19050 *pAllocation = VK_NULL_HANDLE;
// 1. Create the VkBuffer.
19053 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
19054 allocator->m_hDevice,
19056 allocator->GetAllocationCallbacks(),
// 2. Query memory requirements including dedicated-allocation preference.
19061 VkMemoryRequirements vkMemReq = {};
19062 bool requiresDedicatedAllocation =
false;
19063 bool prefersDedicatedAllocation =
false;
19064 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
19065 requiresDedicatedAllocation, prefersDedicatedAllocation);
// 3. Allocate device memory for the buffer.
19068 res = allocator->AllocateMemory(
19070 requiresDedicatedAllocation,
19071 prefersDedicatedAllocation,
19073 pBufferCreateInfo->usage,
19075 *pAllocationCreateInfo,
19076 VMA_SUBALLOCATION_TYPE_BUFFER,
19080 #if VMA_RECORDING_ENABLED
19081 if(allocator->GetRecorder() != VMA_NULL)
19083 allocator->GetRecorder()->RecordCreateBuffer(
19084 allocator->GetCurrentFrameIndex(),
19085 *pBufferCreateInfo,
19086 *pAllocationCreateInfo,
// 4. Bind the memory to the buffer (offset 0, no pNext).
19096 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
// 5. On success: remember the usage for stats dumps, report info to caller.
19101 #if VMA_STATS_STRING_ENABLED
19102 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
19104 if(pAllocationInfo != VMA_NULL)
19106 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// Failure after allocation: free the memory and destroy the buffer.
19111 allocator->FreeMemory(
19114 *pAllocation = VK_NULL_HANDLE;
19115 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19116 *pBuffer = VK_NULL_HANDLE;
// Failure before allocation: destroy only the buffer.
19119 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19120 *pBuffer = VK_NULL_HANDLE;
// --- Fragment: vmaDestroyBuffer ---
// NOTE(review): corrupted extraction — signature, braces, and the trailing
// arguments of FreeMemory(...) are missing. Restore from upstream.
// Destroys the buffer and/or frees its allocation; both-null is a no-op.
19131 VMA_ASSERT(allocator);
19133 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19138 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
19140 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19142 #if VMA_RECORDING_ENABLED
19143 if(allocator->GetRecorder() != VMA_NULL)
19145 allocator->GetRecorder()->RecordDestroyBuffer(
19146 allocator->GetCurrentFrameIndex(),
// Buffer and allocation are released independently — either may be null.
19151 if(buffer != VK_NULL_HANDLE)
19153 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
19156 if(allocation != VK_NULL_HANDLE)
19158 allocator->FreeMemory(
// --- Fragment: vmaCreateImage ---
// NOTE(review): corrupted extraction — signature, several AllocateMemory
// argument lines, and the branch structure/returns are missing. Mirrors
// vmaCreateBuffer: create -> requirements -> allocate -> bind -> info,
// with unwind on failure. Restore from upstream before compiling.
19166 const VkImageCreateInfo* pImageCreateInfo,
19172 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
// Degenerate images (any zero dimension, zero mips, or zero layers) are
// rejected up front.
19174 if(pImageCreateInfo->extent.width == 0 ||
19175 pImageCreateInfo->extent.height == 0 ||
19176 pImageCreateInfo->extent.depth == 0 ||
19177 pImageCreateInfo->mipLevels == 0 ||
19178 pImageCreateInfo->arrayLayers == 0)
19180 return VK_ERROR_VALIDATION_FAILED_EXT;
19183 VMA_DEBUG_LOG(
"vmaCreateImage");
19185 VMA_DEBUG_GLOBAL_MUTEX_LOCK
// Outputs start as null so failure paths leave them in a defined state.
19187 *pImage = VK_NULL_HANDLE;
19188 *pAllocation = VK_NULL_HANDLE;
// 1. Create the VkImage.
19191 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
19192 allocator->m_hDevice,
19194 allocator->GetAllocationCallbacks(),
// Tiling decides the suballocation class so linear and optimal images are
// never placed adjacently without respecting bufferImageGranularity.
19198 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
19199 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
19200 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
// 2. Query memory requirements including dedicated-allocation preference.
19203 VkMemoryRequirements vkMemReq = {};
19204 bool requiresDedicatedAllocation =
false;
19205 bool prefersDedicatedAllocation =
false;
19206 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
19207 requiresDedicatedAllocation, prefersDedicatedAllocation);
// 3. Allocate device memory for the image.
19209 res = allocator->AllocateMemory(
19211 requiresDedicatedAllocation,
19212 prefersDedicatedAllocation,
19216 *pAllocationCreateInfo,
19221 #if VMA_RECORDING_ENABLED
19222 if(allocator->GetRecorder() != VMA_NULL)
19224 allocator->GetRecorder()->RecordCreateImage(
19225 allocator->GetCurrentFrameIndex(),
19227 *pAllocationCreateInfo,
// 4. Bind the memory to the image (offset 0, no pNext).
19237 res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
// 5. On success: remember usage for stats dumps, report info to caller.
19242 #if VMA_STATS_STRING_ENABLED
19243 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
19245 if(pAllocationInfo != VMA_NULL)
19247 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// Failure after allocation: free the memory and destroy the image.
19252 allocator->FreeMemory(
19255 *pAllocation = VK_NULL_HANDLE;
19256 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19257 *pImage = VK_NULL_HANDLE;
// Failure before allocation: destroy only the image.
19260 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19261 *pImage = VK_NULL_HANDLE;
// --- Fragment: vmaDestroyImage (+ closing #endif of VMA_IMPLEMENTATION) ---
// NOTE(review): corrupted extraction — signature, braces, and trailing
// FreeMemory(...) arguments are missing. Restore from upstream.
// Destroys the image and/or frees its allocation; both-null is a no-op.
19272 VMA_ASSERT(allocator);
19274 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19279 VMA_DEBUG_LOG(
"vmaDestroyImage");
19281 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19283 #if VMA_RECORDING_ENABLED
19284 if(allocator->GetRecorder() != VMA_NULL)
19286 allocator->GetRecorder()->RecordDestroyImage(
19287 allocator->GetCurrentFrameIndex(),
// Image and allocation are released independently — either may be null.
19292 if(image != VK_NULL_HANDLE)
19294 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
19296 if(allocation != VK_NULL_HANDLE)
19298 allocator->FreeMemory(
19304 #endif // #ifdef VMA_IMPLEMENTATION