23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
2030 #ifndef VMA_RECORDING_ENABLED
2031 #define VMA_RECORDING_ENABLED 0
2034 #if !defined(NOMINMAX) && defined(VMA_IMPLEMENTATION)
2038 #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
2039 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
2040 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
2041 extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
2042 extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
2043 extern PFN_vkAllocateMemory vkAllocateMemory;
2044 extern PFN_vkFreeMemory vkFreeMemory;
2045 extern PFN_vkMapMemory vkMapMemory;
2046 extern PFN_vkUnmapMemory vkUnmapMemory;
2047 extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
2048 extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
2049 extern PFN_vkBindBufferMemory vkBindBufferMemory;
2050 extern PFN_vkBindImageMemory vkBindImageMemory;
2051 extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
2052 extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
2053 extern PFN_vkCreateBuffer vkCreateBuffer;
2054 extern PFN_vkDestroyBuffer vkDestroyBuffer;
2055 extern PFN_vkCreateImage vkCreateImage;
2056 extern PFN_vkDestroyImage vkDestroyImage;
2057 extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
2058 #if VMA_VULKAN_VERSION >= 1001000
2059 extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
2060 extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
2061 extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
2062 extern PFN_vkBindImageMemory2 vkBindImageMemory2;
2063 extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
2068 #include <vulkan/vulkan.h>
2074 #if !defined(VMA_VULKAN_VERSION)
2075 #if defined(VK_VERSION_1_2)
2076 #define VMA_VULKAN_VERSION 1002000
2077 #elif defined(VK_VERSION_1_1)
2078 #define VMA_VULKAN_VERSION 1001000
2080 #define VMA_VULKAN_VERSION 1000000
2084 #if !defined(VMA_DEDICATED_ALLOCATION)
2085 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
2086 #define VMA_DEDICATED_ALLOCATION 1
2088 #define VMA_DEDICATED_ALLOCATION 0
2092 #if !defined(VMA_BIND_MEMORY2)
2093 #if VK_KHR_bind_memory2
2094 #define VMA_BIND_MEMORY2 1
2096 #define VMA_BIND_MEMORY2 0
2100 #if !defined(VMA_MEMORY_BUDGET)
2101 #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
2102 #define VMA_MEMORY_BUDGET 1
2104 #define VMA_MEMORY_BUDGET 0
2109 #if !defined(VMA_BUFFER_DEVICE_ADDRESS)
2110 #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
2111 #define VMA_BUFFER_DEVICE_ADDRESS 1
2113 #define VMA_BUFFER_DEVICE_ADDRESS 0
2118 #if !defined(VMA_MEMORY_PRIORITY)
2119 #if VK_EXT_memory_priority
2120 #define VMA_MEMORY_PRIORITY 1
2122 #define VMA_MEMORY_PRIORITY 0
2131 #ifndef VMA_CALL_PRE
2132 #define VMA_CALL_PRE
2134 #ifndef VMA_CALL_POST
2135 #define VMA_CALL_POST
2149 #ifndef VMA_LEN_IF_NOT_NULL
2150 #define VMA_LEN_IF_NOT_NULL(len)
2155 #ifndef VMA_NULLABLE
2157 #define VMA_NULLABLE _Nullable
2159 #define VMA_NULLABLE
2165 #ifndef VMA_NOT_NULL
2167 #define VMA_NOT_NULL _Nonnull
2169 #define VMA_NOT_NULL
2175 #ifndef VMA_NOT_NULL_NON_DISPATCHABLE
2176 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2177 #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
2179 #define VMA_NOT_NULL_NON_DISPATCHABLE
2183 #ifndef VMA_NULLABLE_NON_DISPATCHABLE
2184 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2185 #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
2187 #define VMA_NULLABLE_NON_DISPATCHABLE
2205 uint32_t memoryType,
2206 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2208 void* VMA_NULLABLE pUserData);
2212 uint32_t memoryType,
2213 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2215 void* VMA_NULLABLE pUserData);
2372 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
2373 PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
2374 PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
2376 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
2377 PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
2378 PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
2380 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
2381 PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
2471 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(
"VkPhysicalDeviceMemoryProperties::memoryHeapCount")
pHeapSizeLimit;
2544 const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);
2552 const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);
2562 uint32_t memoryTypeIndex,
2563 VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);
2575 uint32_t frameIndex);
2671 #ifndef VMA_STATS_STRING_ENABLED
2672 #define VMA_STATS_STRING_ENABLED 1
2675 #if VMA_STATS_STRING_ENABLED
2682 char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
2683 VkBool32 detailedMap);
2687 char* VMA_NULLABLE pStatsString);
2948 uint32_t memoryTypeBits,
2950 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2966 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
2968 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2984 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
2986 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
3136 VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);
3164 size_t* VMA_NULLABLE pLostAllocationCount);
3191 const char* VMA_NULLABLE * VMA_NOT_NULL ppName);
3201 const char* VMA_NULLABLE pName);
3295 const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
3321 const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
3323 size_t allocationCount,
3324 VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3325 VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);
3335 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3343 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3368 size_t allocationCount,
3369 const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);
3426 void* VMA_NULLABLE pUserData);
3483 void* VMA_NULLABLE * VMA_NOT_NULL ppData);
3521 VkDeviceSize offset,
3548 VkDeviceSize offset,
3567 uint32_t allocationCount,
3568 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3569 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3570 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3588 uint32_t allocationCount,
3589 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3590 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3591 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3670 const VmaPool VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(poolCount)
pPools;
3704 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE
memory;
3842 const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3843 size_t allocationCount,
3844 VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
3863 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);
3878 VkDeviceSize allocationLocalOffset,
3879 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3880 const void* VMA_NULLABLE pNext);
3897 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);
3912 VkDeviceSize allocationLocalOffset,
3913 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3914 const void* VMA_NULLABLE pNext);
3948 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
3950 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
3967 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
3973 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
3975 VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
3992 VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
4002 #if defined(__cplusplus) && defined(__INTELLISENSE__)
4003 #define VMA_IMPLEMENTATION
4006 #ifdef VMA_IMPLEMENTATION
4007 #undef VMA_IMPLEMENTATION
4014 #if VMA_RECORDING_ENABLED
4017 #include <windows.h>
4037 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
4038 #define VMA_STATIC_VULKAN_FUNCTIONS 1
4047 #if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
4048 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
4049 #if defined(VK_NO_PROTOTYPES)
4050 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
4051 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
4064 #if VMA_USE_STL_CONTAINERS
4065 #define VMA_USE_STL_VECTOR 1
4066 #define VMA_USE_STL_UNORDERED_MAP 1
4067 #define VMA_USE_STL_LIST 1
4070 #ifndef VMA_USE_STL_SHARED_MUTEX
4072 #if __cplusplus >= 201703L
4073 #define VMA_USE_STL_SHARED_MUTEX 1
4077 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
4078 #define VMA_USE_STL_SHARED_MUTEX 1
4080 #define VMA_USE_STL_SHARED_MUTEX 0
4088 #if VMA_USE_STL_VECTOR
4092 #if VMA_USE_STL_UNORDERED_MAP
4093 #include <unordered_map>
4096 #if VMA_USE_STL_LIST
4105 #include <algorithm>
4110 #define VMA_NULL nullptr
// Platform dispatch for aligned CPU allocation, used by the
// VMA_SYSTEM_ALIGNED_MALLOC / VMA_SYSTEM_ALIGNED_FREE macros below.
// Every branch defines vma_aligned_alloc(alignment, size); vma_aligned_free
// must match whichever allocator was selected.
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib>
// Old Android APIs lack aligned_alloc/posix_memalign; fall back to memalign().
static void* vma_aligned_alloc(size_t alignment, size_t size)
{
    // Keep alignment at least pointer-sized, as required by the allocator.
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib>

#if defined(__APPLE__)
#include <AvailabilityMacros.h>
#endif

static void* vma_aligned_alloc(size_t alignment, size_t size)
{
#if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0))
#if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0
    // aligned_alloc() is only available on Apple platforms since macOS 10.15 /
    // iOS 13; when building with a newer SDK, check availability at runtime.
    if (__builtin_available(macOS 10.15, iOS 13, *))
        return aligned_alloc(alignment, size);
#endif
#endif
    // posix_memalign requires the alignment to be a multiple of sizeof(void*).
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#elif defined(_WIN32)
static void* vma_aligned_alloc(size_t alignment, size_t size)
{
    // Note the reversed argument order compared to C11 aligned_alloc().
    return _aligned_malloc(size, alignment);
}
#else
static void* vma_aligned_alloc(size_t alignment, size_t size)
{
    return aligned_alloc(alignment, size);
}
#endif

#if defined(_WIN32)
static void vma_aligned_free(void* ptr)
{
    // Memory from _aligned_malloc must be released with _aligned_free.
    _aligned_free(ptr);
}
#else
static void vma_aligned_free(void* ptr)
{
    free(ptr);
}
#endif
4189 #define VMA_ASSERT(expr)
4191 #define VMA_ASSERT(expr) assert(expr)
4197 #ifndef VMA_HEAVY_ASSERT
4199 #define VMA_HEAVY_ASSERT(expr)
4201 #define VMA_HEAVY_ASSERT(expr)
4205 #ifndef VMA_ALIGN_OF
4206 #define VMA_ALIGN_OF(type) (__alignof(type))
4209 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
4210 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
4213 #ifndef VMA_SYSTEM_ALIGNED_FREE
4215 #if defined(VMA_SYSTEM_FREE)
4216 #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr)
4218 #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr)
4223 #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
4227 #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
4231 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
4235 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
4238 #ifndef VMA_DEBUG_LOG
4239 #define VMA_DEBUG_LOG(format, ...)
4249 #if VMA_STATS_STRING_ENABLED
// Helpers converting numbers and pointers to strings for the stats-string
// (JSON) output. All of them write at most strLen bytes including the
// terminating '\0' (snprintf semantics).
static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    // Cast so the value always matches the "%llu" conversion exactly.
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
// Default mutex wrappers. VMA_MUTEX / VMA_RW_MUTEX can be overridden by the
// user before including the implementation section.
class VmaMutex
{
public:
    void Lock() { m_Mutex.lock(); }
    void Unlock() { m_Mutex.unlock(); }
    bool TryLock() { return m_Mutex.try_lock(); }
private:
    std::mutex m_Mutex;
};
#define VMA_MUTEX VmaMutex

// Read-write mutex: "write" lock is exclusive, "read" lock is shared.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            bool TryLockRead() { return m_Mutex.try_lock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
            bool TryLockWrite() { return m_Mutex.try_lock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI (available since Windows Vista).
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
            bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: a plain mutex serves both read and write locks.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            bool TryLockRead() { return m_Mutex.TryLock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
            bool TryLockWrite() { return m_Mutex.TryLock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif
#endif
4333 #ifndef VMA_ATOMIC_UINT32
4335 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
4338 #ifndef VMA_ATOMIC_UINT64
4340 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
4343 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
4348 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
4351 #ifndef VMA_DEBUG_ALIGNMENT
4356 #define VMA_DEBUG_ALIGNMENT (1)
4359 #ifndef VMA_DEBUG_MARGIN
4364 #define VMA_DEBUG_MARGIN (0)
4367 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
4372 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
4375 #ifndef VMA_DEBUG_DETECT_CORRUPTION
4381 #define VMA_DEBUG_DETECT_CORRUPTION (0)
4384 #ifndef VMA_DEBUG_GLOBAL_MUTEX
4389 #define VMA_DEBUG_GLOBAL_MUTEX (0)
4392 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
4397 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
4400 #ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
4405 #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0)
4408 #ifndef VMA_SMALL_HEAP_MAX_SIZE
4410 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
4413 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
4415 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
4418 #ifndef VMA_CLASS_NO_COPY
4419 #define VMA_CLASS_NO_COPY(className) \
4421 className(const className&) = delete; \
4422 className& operator=(const className&) = delete;
4425 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
4428 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
4430 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
4431 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
4439 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
4440 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
4441 static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
4443 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
4445 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
4446 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count), using the
// classic branch-free SWAR reduction.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Returns true if `x` is a power of 2. Note: also returns true for x == 0;
// callers only use it with nonzero alignments.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
// Aligns `val` up to the nearest multiple of `alignment`.
// `alignment` must be a power of 2 (checked only in heavy-assert builds).
template <typename T>
static inline T VmaAlignUp(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    return (val + alignment - 1) & ~(alignment - 1);
}
// Aligns `val` down to the nearest multiple of `alignment`.
// `alignment` must be a power of 2 (checked only in heavy-assert builds).
template <typename T>
static inline T VmaAlignDown(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    return val & ~(alignment - 1);
}
// Division of `x` by `y` with rounding to the nearest integer (ties round up
// for non-negative operands).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns the smallest power of 2 greater than or equal to `v`
// (bit-smearing technique; v == 0 yields 0).
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns the largest power of 2 less than or equal to `v`
// (v == 0 yields 0).
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
4542 static inline bool VmaStrIsEmpty(
const char* pStr)
4544 return pStr == VMA_NULL || *pStr ==
'\0';
4547 #if VMA_STATS_STRING_ENABLED
4549 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Partition step for VmaQuickSort: partitions [beg, end) around the last
// element as pivot and returns the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
4592 template<
typename Iterator,
typename Compare>
4593 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
4597 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
4598 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
4599 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
4603 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
4614 static inline bool VmaBlocksOnSamePage(
4615 VkDeviceSize resourceAOffset,
4616 VkDeviceSize resourceASize,
4617 VkDeviceSize resourceBOffset,
4618 VkDeviceSize pageSize)
4620 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
4621 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
4622 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
4623 VkDeviceSize resourceBStart = resourceBOffset;
4624 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
4625 return resourceAEndPage == resourceBStartPage;
// Category of a suballocation within a memory block. The numeric order
// matters: VmaIsBufferImageGranularityConflict() normalizes its two arguments
// by swapping so that the smaller value comes first.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    // Forces the enum's underlying type to be at least 32-bit.
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
4645 static inline bool VmaIsBufferImageGranularityConflict(
4646 VmaSuballocationType suballocType1,
4647 VmaSuballocationType suballocType2)
4649 if(suballocType1 > suballocType2)
4651 VMA_SWAP(suballocType1, suballocType2);
4654 switch(suballocType1)
4656 case VMA_SUBALLOCATION_TYPE_FREE:
4658 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
4660 case VMA_SUBALLOCATION_TYPE_BUFFER:
4662 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4663 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4664 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
4666 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4667 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
4668 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4669 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
4671 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4672 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
4680 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
4682 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4683 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
4684 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4685 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
4687 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
4694 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
4696 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4697 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
4698 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4699 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
4701 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
4714 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
4716 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
4717 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4718 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4719 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
4725 VMA_CLASS_NO_COPY(VmaMutexLock)
4727 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
4728 m_pMutex(useMutex ? &mutex : VMA_NULL)
4729 {
if(m_pMutex) { m_pMutex->Lock(); } }
4731 {
if(m_pMutex) { m_pMutex->Unlock(); } }
4733 VMA_MUTEX* m_pMutex;
4737 struct VmaMutexLockRead
4739 VMA_CLASS_NO_COPY(VmaMutexLockRead)
4741 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
4742 m_pMutex(useMutex ? &mutex : VMA_NULL)
4743 {
if(m_pMutex) { m_pMutex->LockRead(); } }
4744 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
4746 VMA_RW_MUTEX* m_pMutex;
4750 struct VmaMutexLockWrite
4752 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
4754 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
4755 m_pMutex(useMutex ? &mutex : VMA_NULL)
4756 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
4757 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
4759 VMA_RW_MUTEX* m_pMutex;
4762 #if VMA_DEBUG_GLOBAL_MUTEX
4763 static VMA_MUTEX gDebugGlobalMutex;
4764 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
4766 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
4770 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to the
// first element that is NOT less than `key` (std::lower_bound semantics), or
// `end` if every element compares less.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        // Overflow-safe midpoint.
        const size_t mid = down + (up - down) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
4800 template<
typename CmpLess,
typename IterT,
typename KeyT>
4801 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
4803 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4804 beg, end, value, cmp);
4806 (!cmp(*it, value) && !cmp(value, *it)))
4818 template<
typename T>
4819 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
4821 for(uint32_t i = 0; i < count; ++i)
4823 const T iPtr = arr[i];
4824 if(iPtr == VMA_NULL)
4828 for(uint32_t j = i + 1; j < count; ++j)
// Inserts `newStruct` at the front of the Vulkan pNext extension chain headed
// by `mainStruct` (both must have a pNext member).
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
4849 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
4851 void* result = VMA_NULL;
4852 if((pAllocationCallbacks != VMA_NULL) &&
4853 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
4855 result = (*pAllocationCallbacks->pfnAllocation)(
4856 pAllocationCallbacks->pUserData,
4859 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
4863 result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
4865 VMA_ASSERT(result != VMA_NULL &&
"CPU memory allocation failed.");
4869 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
4871 if((pAllocationCallbacks != VMA_NULL) &&
4872 (pAllocationCallbacks->pfnFree != VMA_NULL))
4874 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
4878 VMA_SYSTEM_ALIGNED_FREE(ptr);
4882 template<
typename T>
4883 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
4885 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
4888 template<
typename T>
4889 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
4891 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
4894 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
4896 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
4898 template<
typename T>
4899 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
4902 VmaFree(pAllocationCallbacks, ptr);
4905 template<
typename T>
4906 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
4910 for(
size_t i = count; i--; )
4914 VmaFree(pAllocationCallbacks, ptr);
4918 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr)
4920 if(srcStr != VMA_NULL)
4922 const size_t len = strlen(srcStr);
4923 char*
const result = vma_new_array(allocs,
char, len + 1);
4924 memcpy(result, srcStr, len + 1);
4933 static void VmaFreeString(
const VkAllocationCallbacks* allocs,
char* str)
4937 const size_t len = strlen(str);
4938 vma_delete_array(allocs, str, len + 1);
4943 template<
typename T>
4944 class VmaStlAllocator
4947 const VkAllocationCallbacks*
const m_pCallbacks;
4948 typedef T value_type;
4950 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
4951 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
4953 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
4954 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
4956 template<
typename U>
4957 bool operator==(
const VmaStlAllocator<U>& rhs)
const
4959 return m_pCallbacks == rhs.m_pCallbacks;
4961 template<
typename U>
4962 bool operator!=(
const VmaStlAllocator<U>& rhs)
const
4964 return m_pCallbacks != rhs.m_pCallbacks;
4967 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
4970 #if VMA_USE_STL_VECTOR
4972 #define VmaVector std::vector
// Free-function insert/remove helpers so internal code can manipulate
// std::vector and VmaVector through the same interface.
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
4991 template<
typename T,
typename AllocatorT>
4995 typedef T value_type;
4997 VmaVector(
const AllocatorT& allocator) :
4998 m_Allocator(allocator),
5005 VmaVector(
size_t count,
const AllocatorT& allocator) :
5006 m_Allocator(allocator),
5007 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
5015 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator)
5016 : VmaVector(count, allocator) {}
5018 VmaVector(
const VmaVector<T, AllocatorT>& src) :
5019 m_Allocator(src.m_Allocator),
5020 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
5021 m_Count(src.m_Count),
5022 m_Capacity(src.m_Count)
5026 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
5032 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5035 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
5039 resize(rhs.m_Count);
5042 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
5048 bool empty()
const {
return m_Count == 0; }
5049 size_t size()
const {
return m_Count; }
5050 T* data() {
return m_pArray; }
5051 const T* data()
const {
return m_pArray; }
5053 T& operator[](
size_t index)
5055 VMA_HEAVY_ASSERT(index < m_Count);
5056 return m_pArray[index];
5058 const T& operator[](
size_t index)
const
5060 VMA_HEAVY_ASSERT(index < m_Count);
5061 return m_pArray[index];
5066 VMA_HEAVY_ASSERT(m_Count > 0);
5069 const T& front()
const
5071 VMA_HEAVY_ASSERT(m_Count > 0);
5076 VMA_HEAVY_ASSERT(m_Count > 0);
5077 return m_pArray[m_Count - 1];
5079 const T& back()
const
5081 VMA_HEAVY_ASSERT(m_Count > 0);
5082 return m_pArray[m_Count - 1];
5085 void reserve(
size_t newCapacity,
bool freeMemory =
false)
5087 newCapacity = VMA_MAX(newCapacity, m_Count);
5089 if((newCapacity < m_Capacity) && !freeMemory)
5091 newCapacity = m_Capacity;
5094 if(newCapacity != m_Capacity)
5096 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
5099 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5101 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5102 m_Capacity = newCapacity;
5103 m_pArray = newArray;
5107 void resize(
size_t newCount,
bool freeMemory =
false)
5109 size_t newCapacity = m_Capacity;
5110 if(newCount > m_Capacity)
5112 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
5116 newCapacity = newCount;
5119 if(newCapacity != m_Capacity)
5121 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
5122 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
5123 if(elementsToCopy != 0)
5125 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
5127 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5128 m_Capacity = newCapacity;
5129 m_pArray = newArray;
5135 void clear(
bool freeMemory =
false)
5137 resize(0, freeMemory);
5140 void insert(
size_t index,
const T& src)
5142 VMA_HEAVY_ASSERT(index <= m_Count);
5143 const size_t oldCount = size();
5144 resize(oldCount + 1);
5145 if(index < oldCount)
5147 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
5149 m_pArray[index] = src;
5152 void remove(
size_t index)
5154 VMA_HEAVY_ASSERT(index < m_Count);
5155 const size_t oldCount = size();
5156 if(index < oldCount - 1)
5158 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
5160 resize(oldCount - 1);
5163 void push_back(
const T& src)
5165 const size_t newIndex = size();
5166 resize(newIndex + 1);
5167 m_pArray[newIndex] = src;
5172 VMA_HEAVY_ASSERT(m_Count > 0);
5176 void push_front(
const T& src)
5183 VMA_HEAVY_ASSERT(m_Count > 0);
5187 typedef T* iterator;
5189 iterator begin() {
return m_pArray; }
5190 iterator end() {
return m_pArray + m_Count; }
5193 AllocatorT m_Allocator;
5199 template<
typename T,
typename allocatorT>
5200 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
5202 vec.insert(index, item);
5205 template<
typename T,
typename allocatorT>
5206 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
// Inserts `value` into a vector kept sorted by CmpLess, preserving the order,
// and returns the insertion index.
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

// Removes one element equivalent to `value` from a sorted vector.
// Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
5254 template<
typename T,
typename AllocatorT,
size_t N>
5255 class VmaSmallVector
5258 typedef T value_type;
5260 VmaSmallVector(
const AllocatorT& allocator) :
5262 m_DynamicArray(allocator)
5265 VmaSmallVector(
size_t count,
const AllocatorT& allocator) :
5267 m_DynamicArray(count > N ? count : 0, allocator)
5270 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5271 VmaSmallVector(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) =
delete;
5272 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5273 VmaSmallVector<T, AllocatorT, N>& operator=(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) =
delete;
5275 bool empty()
const {
return m_Count == 0; }
5276 size_t size()
const {
return m_Count; }
5277 T* data() {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5278 const T* data()
const {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5280 T& operator[](
size_t index)
5282 VMA_HEAVY_ASSERT(index < m_Count);
5283 return data()[index];
5285 const T& operator[](
size_t index)
const
5287 VMA_HEAVY_ASSERT(index < m_Count);
5288 return data()[index];
5293 VMA_HEAVY_ASSERT(m_Count > 0);
5296 const T& front()
const
5298 VMA_HEAVY_ASSERT(m_Count > 0);
5303 VMA_HEAVY_ASSERT(m_Count > 0);
5304 return data()[m_Count - 1];
5306 const T& back()
const
5308 VMA_HEAVY_ASSERT(m_Count > 0);
5309 return data()[m_Count - 1];
5312 void resize(
size_t newCount,
bool freeMemory =
false)
5314 if(newCount > N && m_Count > N)
5317 m_DynamicArray.resize(newCount, freeMemory);
5319 else if(newCount > N && m_Count <= N)
5322 m_DynamicArray.resize(newCount, freeMemory);
5325 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count *
sizeof(T));
5328 else if(newCount <= N && m_Count > N)
5333 memcpy(m_StaticArray, m_DynamicArray.data(), newCount *
sizeof(T));
5335 m_DynamicArray.resize(0, freeMemory);
5344 void clear(
bool freeMemory =
false)
5346 m_DynamicArray.clear(freeMemory);
5350 void insert(
size_t index,
const T& src)
5352 VMA_HEAVY_ASSERT(index <= m_Count);
5353 const size_t oldCount = size();
5354 resize(oldCount + 1);
5355 T*
const dataPtr = data();
5356 if(index < oldCount)
5359 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) *
sizeof(T));
5361 dataPtr[index] = src;
5364 void remove(
size_t index)
5366 VMA_HEAVY_ASSERT(index < m_Count);
5367 const size_t oldCount = size();
5368 if(index < oldCount - 1)
5371 T*
const dataPtr = data();
5372 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) *
sizeof(T));
5374 resize(oldCount - 1);
5377 void push_back(
const T& src)
5379 const size_t newIndex = size();
5380 resize(newIndex + 1);
5381 data()[newIndex] = src;
5386 VMA_HEAVY_ASSERT(m_Count > 0);
5390 void push_front(
const T& src)
5397 VMA_HEAVY_ASSERT(m_Count > 0);
5401 typedef T* iterator;
5403 iterator begin() {
return data(); }
5404 iterator end() {
return data() + m_Count; }
5409 VmaVector<T, AllocatorT> m_DynamicArray;
5420 template<
typename T>
5421 class VmaPoolAllocator
5423 VMA_CLASS_NO_COPY(VmaPoolAllocator)
5425 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
5426 ~VmaPoolAllocator();
5427 template<
typename... Types> T* Alloc(Types... args);
5433 uint32_t NextFreeIndex;
5434 alignas(T)
char Value[
sizeof(T)];
5441 uint32_t FirstFreeIndex;
5444 const VkAllocationCallbacks* m_pAllocationCallbacks;
5445 const uint32_t m_FirstBlockCapacity;
5446 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
5448 ItemBlock& CreateNewBlock();
5451 template<
typename T>
5452 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
5453 m_pAllocationCallbacks(pAllocationCallbacks),
5454 m_FirstBlockCapacity(firstBlockCapacity),
5455 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
5457 VMA_ASSERT(m_FirstBlockCapacity > 1);
5460 template<
typename T>
5461 VmaPoolAllocator<T>::~VmaPoolAllocator()
5463 for(
size_t i = m_ItemBlocks.size(); i--; )
5464 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
5465 m_ItemBlocks.clear();
5468 template<
typename T>
5469 template<
typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
5471 for(
size_t i = m_ItemBlocks.size(); i--; )
5473 ItemBlock& block = m_ItemBlocks[i];
5475 if(block.FirstFreeIndex != UINT32_MAX)
5477 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
5478 block.FirstFreeIndex = pItem->NextFreeIndex;
5479 T* result = (T*)&pItem->Value;
5480 new(result)T(std::forward<Types>(args)...);
5486 ItemBlock& newBlock = CreateNewBlock();
5487 Item*
const pItem = &newBlock.pItems[0];
5488 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
5489 T* result = (T*)&pItem->Value;
5490 new(result)T(std::forward<Types>(args)...);
5494 template<
typename T>
5495 void VmaPoolAllocator<T>::Free(T* ptr)
5498 for(
size_t i = m_ItemBlocks.size(); i--; )
5500 ItemBlock& block = m_ItemBlocks[i];
5504 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
5507 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
5510 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
5511 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
5512 block.FirstFreeIndex = index;
5516 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
5519 template<
typename T>
5520 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
5522 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
5523 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
5525 const ItemBlock newBlock = {
5526 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
5530 m_ItemBlocks.push_back(newBlock);
5533 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
5534 newBlock.pItems[i].NextFreeIndex = i + 1;
5535 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
5536 return m_ItemBlocks.back();
5542 #if VMA_USE_STL_LIST
5544 #define VmaList std::list
// Node of the doubly-linked list used by VmaRawList/VmaList.
// NOTE(review): struct body was lost in extraction; restored from the usage
// of pPrev/pNext/Value throughout VmaRawList below.
template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};
5557 template<
typename T>
5560 VMA_CLASS_NO_COPY(VmaRawList)
5562 typedef VmaListItem<T> ItemType;
5564 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
5568 size_t GetCount()
const {
return m_Count; }
5569 bool IsEmpty()
const {
return m_Count == 0; }
5571 ItemType* Front() {
return m_pFront; }
5572 const ItemType* Front()
const {
return m_pFront; }
5573 ItemType* Back() {
return m_pBack; }
5574 const ItemType* Back()
const {
return m_pBack; }
5576 ItemType* PushBack();
5577 ItemType* PushFront();
5578 ItemType* PushBack(
const T& value);
5579 ItemType* PushFront(
const T& value);
5584 ItemType* InsertBefore(ItemType* pItem);
5586 ItemType* InsertAfter(ItemType* pItem);
5588 ItemType* InsertBefore(ItemType* pItem,
const T& value);
5589 ItemType* InsertAfter(ItemType* pItem,
const T& value);
5591 void Remove(ItemType* pItem);
5594 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
5595 VmaPoolAllocator<ItemType> m_ItemAllocator;
5601 template<
typename T>
5602 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
5603 m_pAllocationCallbacks(pAllocationCallbacks),
5604 m_ItemAllocator(pAllocationCallbacks, 128),
5611 template<
typename T>
5612 VmaRawList<T>::~VmaRawList()
5618 template<
typename T>
5619 void VmaRawList<T>::Clear()
5621 if(IsEmpty() ==
false)
5623 ItemType* pItem = m_pBack;
5624 while(pItem != VMA_NULL)
5626 ItemType*
const pPrevItem = pItem->pPrev;
5627 m_ItemAllocator.Free(pItem);
5630 m_pFront = VMA_NULL;
5636 template<
typename T>
5637 VmaListItem<T>* VmaRawList<T>::PushBack()
5639 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5640 pNewItem->pNext = VMA_NULL;
5643 pNewItem->pPrev = VMA_NULL;
5644 m_pFront = pNewItem;
5650 pNewItem->pPrev = m_pBack;
5651 m_pBack->pNext = pNewItem;
5658 template<
typename T>
5659 VmaListItem<T>* VmaRawList<T>::PushFront()
5661 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5662 pNewItem->pPrev = VMA_NULL;
5665 pNewItem->pNext = VMA_NULL;
5666 m_pFront = pNewItem;
5672 pNewItem->pNext = m_pFront;
5673 m_pFront->pPrev = pNewItem;
5674 m_pFront = pNewItem;
5680 template<
typename T>
5681 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
5683 ItemType*
const pNewItem = PushBack();
5684 pNewItem->Value = value;
5688 template<
typename T>
5689 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
5691 ItemType*
const pNewItem = PushFront();
5692 pNewItem->Value = value;
5696 template<
typename T>
5697 void VmaRawList<T>::PopBack()
5699 VMA_HEAVY_ASSERT(m_Count > 0);
5700 ItemType*
const pBackItem = m_pBack;
5701 ItemType*
const pPrevItem = pBackItem->pPrev;
5702 if(pPrevItem != VMA_NULL)
5704 pPrevItem->pNext = VMA_NULL;
5706 m_pBack = pPrevItem;
5707 m_ItemAllocator.Free(pBackItem);
5711 template<
typename T>
5712 void VmaRawList<T>::PopFront()
5714 VMA_HEAVY_ASSERT(m_Count > 0);
5715 ItemType*
const pFrontItem = m_pFront;
5716 ItemType*
const pNextItem = pFrontItem->pNext;
5717 if(pNextItem != VMA_NULL)
5719 pNextItem->pPrev = VMA_NULL;
5721 m_pFront = pNextItem;
5722 m_ItemAllocator.Free(pFrontItem);
5726 template<
typename T>
5727 void VmaRawList<T>::Remove(ItemType* pItem)
5729 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
5730 VMA_HEAVY_ASSERT(m_Count > 0);
5732 if(pItem->pPrev != VMA_NULL)
5734 pItem->pPrev->pNext = pItem->pNext;
5738 VMA_HEAVY_ASSERT(m_pFront == pItem);
5739 m_pFront = pItem->pNext;
5742 if(pItem->pNext != VMA_NULL)
5744 pItem->pNext->pPrev = pItem->pPrev;
5748 VMA_HEAVY_ASSERT(m_pBack == pItem);
5749 m_pBack = pItem->pPrev;
5752 m_ItemAllocator.Free(pItem);
5756 template<
typename T>
5757 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
5759 if(pItem != VMA_NULL)
5761 ItemType*
const prevItem = pItem->pPrev;
5762 ItemType*
const newItem = m_ItemAllocator.Alloc();
5763 newItem->pPrev = prevItem;
5764 newItem->pNext = pItem;
5765 pItem->pPrev = newItem;
5766 if(prevItem != VMA_NULL)
5768 prevItem->pNext = newItem;
5772 VMA_HEAVY_ASSERT(m_pFront == pItem);
5782 template<
typename T>
5783 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
5785 if(pItem != VMA_NULL)
5787 ItemType*
const nextItem = pItem->pNext;
5788 ItemType*
const newItem = m_ItemAllocator.Alloc();
5789 newItem->pNext = nextItem;
5790 newItem->pPrev = pItem;
5791 pItem->pNext = newItem;
5792 if(nextItem != VMA_NULL)
5794 nextItem->pPrev = newItem;
5798 VMA_HEAVY_ASSERT(m_pBack == pItem);
5808 template<
typename T>
5809 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
5811 ItemType*
const newItem = InsertBefore(pItem);
5812 newItem->Value = value;
5816 template<
typename T>
5817 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
5819 ItemType*
const newItem = InsertAfter(pItem);
5820 newItem->Value = value;
5824 template<
typename T,
typename AllocatorT>
5827 VMA_CLASS_NO_COPY(VmaList)
5838 T& operator*()
const
5840 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5841 return m_pItem->Value;
5843 T* operator->()
const
5845 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5846 return &m_pItem->Value;
5849 iterator& operator++()
5851 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5852 m_pItem = m_pItem->pNext;
5855 iterator& operator--()
5857 if(m_pItem != VMA_NULL)
5859 m_pItem = m_pItem->pPrev;
5863 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5864 m_pItem = m_pList->Back();
5869 iterator operator++(
int)
5871 iterator result = *
this;
5875 iterator operator--(
int)
5877 iterator result = *
this;
5882 bool operator==(
const iterator& rhs)
const
5884 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5885 return m_pItem == rhs.m_pItem;
5887 bool operator!=(
const iterator& rhs)
const
5889 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5890 return m_pItem != rhs.m_pItem;
5894 VmaRawList<T>* m_pList;
5895 VmaListItem<T>* m_pItem;
5897 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
5903 friend class VmaList<T, AllocatorT>;
5906 class const_iterator
5915 const_iterator(
const iterator& src) :
5916 m_pList(src.m_pList),
5917 m_pItem(src.m_pItem)
5921 const T& operator*()
const
5923 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5924 return m_pItem->Value;
5926 const T* operator->()
const
5928 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5929 return &m_pItem->Value;
5932 const_iterator& operator++()
5934 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5935 m_pItem = m_pItem->pNext;
5938 const_iterator& operator--()
5940 if(m_pItem != VMA_NULL)
5942 m_pItem = m_pItem->pPrev;
5946 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5947 m_pItem = m_pList->Back();
5952 const_iterator operator++(
int)
5954 const_iterator result = *
this;
5958 const_iterator operator--(
int)
5960 const_iterator result = *
this;
5965 bool operator==(
const const_iterator& rhs)
const
5967 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5968 return m_pItem == rhs.m_pItem;
5970 bool operator!=(
const const_iterator& rhs)
const
5972 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5973 return m_pItem != rhs.m_pItem;
5977 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
5983 const VmaRawList<T>* m_pList;
5984 const VmaListItem<T>* m_pItem;
5986 friend class VmaList<T, AllocatorT>;
5989 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
5991 bool empty()
const {
return m_RawList.IsEmpty(); }
5992 size_t size()
const {
return m_RawList.GetCount(); }
5994 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
5995 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
5997 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
5998 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
6000 void clear() { m_RawList.Clear(); }
6001 void push_back(
const T& value) { m_RawList.PushBack(value); }
6002 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
6003 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
6006 VmaRawList<T> m_RawList;
6025 template<
typename ItemTypeTraits>
6026 class VmaIntrusiveLinkedList
6029 typedef typename ItemTypeTraits::ItemType ItemType;
6030 static ItemType* GetPrev(
const ItemType* item) {
return ItemTypeTraits::GetPrev(item); }
6031 static ItemType* GetNext(
const ItemType* item) {
return ItemTypeTraits::GetNext(item); }
6033 VmaIntrusiveLinkedList() { }
6034 VmaIntrusiveLinkedList(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
6035 VmaIntrusiveLinkedList(VmaIntrusiveLinkedList<ItemTypeTraits>&& src) :
6036 m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count)
6038 src.m_Front = src.m_Back = VMA_NULL;
6041 ~VmaIntrusiveLinkedList()
6043 VMA_HEAVY_ASSERT(IsEmpty());
6045 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
6046 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(VmaIntrusiveLinkedList<ItemTypeTraits>&& src)
6050 VMA_HEAVY_ASSERT(IsEmpty());
6051 m_Front = src.m_Front;
6052 m_Back = src.m_Back;
6053 m_Count = src.m_Count;
6054 src.m_Front = src.m_Back = VMA_NULL;
6063 ItemType* item = m_Back;
6064 while(item != VMA_NULL)
6066 ItemType*
const prevItem = ItemTypeTraits::AccessPrev(item);
6067 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
6068 ItemTypeTraits::AccessNext(item) = VMA_NULL;
6076 size_t GetCount()
const {
return m_Count; }
6077 bool IsEmpty()
const {
return m_Count == 0; }
6078 ItemType* Front() {
return m_Front; }
6079 const ItemType* Front()
const {
return m_Front; }
6080 ItemType* Back() {
return m_Back; }
6081 const ItemType* Back()
const {
return m_Back; }
6082 void PushBack(ItemType* item)
6084 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
6093 ItemTypeTraits::AccessPrev(item) = m_Back;
6094 ItemTypeTraits::AccessNext(m_Back) = item;
6099 void PushFront(ItemType* item)
6101 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
6110 ItemTypeTraits::AccessNext(item) = m_Front;
6111 ItemTypeTraits::AccessPrev(m_Front) = item;
6118 VMA_HEAVY_ASSERT(m_Count > 0);
6119 ItemType*
const backItem = m_Back;
6120 ItemType*
const prevItem = ItemTypeTraits::GetPrev(backItem);
6121 if(prevItem != VMA_NULL)
6123 ItemTypeTraits::AccessNext(prevItem) = VMA_NULL;
6127 ItemTypeTraits::AccessPrev(backItem) = VMA_NULL;
6128 ItemTypeTraits::AccessNext(backItem) = VMA_NULL;
6131 ItemType* PopFront()
6133 VMA_HEAVY_ASSERT(m_Count > 0);
6134 ItemType*
const frontItem = m_Front;
6135 ItemType*
const nextItem = ItemTypeTraits::GetNext(frontItem);
6136 if(nextItem != VMA_NULL)
6138 ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL;
6142 ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL;
6143 ItemTypeTraits::AccessNext(frontItem) = VMA_NULL;
6148 void InsertBefore(ItemType* existingItem, ItemType* newItem)
6150 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
6151 if(existingItem != VMA_NULL)
6153 ItemType*
const prevItem = ItemTypeTraits::GetPrev(existingItem);
6154 ItemTypeTraits::AccessPrev(newItem) = prevItem;
6155 ItemTypeTraits::AccessNext(newItem) = existingItem;
6156 ItemTypeTraits::AccessPrev(existingItem) = newItem;
6157 if(prevItem != VMA_NULL)
6159 ItemTypeTraits::AccessNext(prevItem) = newItem;
6163 VMA_HEAVY_ASSERT(m_Front == existingItem);
6172 void InsertAfter(ItemType* existingItem, ItemType* newItem)
6174 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
6175 if(existingItem != VMA_NULL)
6177 ItemType*
const nextItem = ItemTypeTraits::GetNext(existingItem);
6178 ItemTypeTraits::AccessNext(newItem) = nextItem;
6179 ItemTypeTraits::AccessPrev(newItem) = existingItem;
6180 ItemTypeTraits::AccessNext(existingItem) = newItem;
6181 if(nextItem != VMA_NULL)
6183 ItemTypeTraits::AccessPrev(nextItem) = newItem;
6187 VMA_HEAVY_ASSERT(m_Back == existingItem);
6193 return PushFront(newItem);
6195 void Remove(ItemType* item)
6197 VMA_HEAVY_ASSERT(item != VMA_NULL && m_Count > 0);
6198 if(ItemTypeTraits::GetPrev(item) != VMA_NULL)
6200 ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item);
6204 VMA_HEAVY_ASSERT(m_Front == item);
6205 m_Front = ItemTypeTraits::GetNext(item);
6208 if(ItemTypeTraits::GetNext(item) != VMA_NULL)
6210 ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item);
6214 VMA_HEAVY_ASSERT(m_Back == item);
6215 m_Back = ItemTypeTraits::GetPrev(item);
6217 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
6218 ItemTypeTraits::AccessNext(item) = VMA_NULL;
6222 ItemType* m_Front = VMA_NULL;
6223 ItemType* m_Back = VMA_NULL;
6233 #if VMA_USE_STL_UNORDERED_MAP
6235 #define VmaPair std::pair
6237 #define VMA_MAP_TYPE(KeyT, ValueT) \
6238 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
// Minimal std::pair replacement used by VmaMap when STL containers are
// disabled.
template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
6255 template<
typename KeyT,
typename ValueT>
6259 typedef VmaPair<KeyT, ValueT> PairType;
6260 typedef PairType* iterator;
6262 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
6264 iterator begin() {
return m_Vector.begin(); }
6265 iterator end() {
return m_Vector.end(); }
6267 void insert(
const PairType& pair);
6268 iterator find(
const KeyT& key);
6269 void erase(iterator it);
6272 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
6275 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
6277 template<
typename FirstT,
typename SecondT>
6278 struct VmaPairFirstLess
6280 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const
6282 return lhs.first < rhs.first;
6284 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const
6286 return lhs.first < rhsFirst;
6290 template<
typename KeyT,
typename ValueT>
6291 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
6293 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
6295 m_Vector.data() + m_Vector.size(),
6297 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
6298 VmaVectorInsert(m_Vector, indexToInsert, pair);
6301 template<
typename KeyT,
typename ValueT>
6302 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
6304 PairType* it = VmaBinaryFindFirstNotLess(
6306 m_Vector.data() + m_Vector.size(),
6308 VmaPairFirstLess<KeyT, ValueT>());
6309 if((it != m_Vector.end()) && (it->first == key))
6315 return m_Vector.end();
6319 template<
typename KeyT,
typename ValueT>
6320 void VmaMap<KeyT, ValueT>::erase(iterator it)
6322 VmaVectorRemove(m_Vector, it - m_Vector.begin());
6331 class VmaDeviceMemoryBlock;
6333 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
6335 struct VmaAllocation_T
6338 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
6342 FLAG_USER_DATA_STRING = 0x01,
6346 enum ALLOCATION_TYPE
6348 ALLOCATION_TYPE_NONE,
6349 ALLOCATION_TYPE_BLOCK,
6350 ALLOCATION_TYPE_DEDICATED,
6357 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
6360 m_pUserData{VMA_NULL},
6361 m_LastUseFrameIndex{currentFrameIndex},
6362 m_MemoryTypeIndex{0},
6363 m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
6364 m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
6366 m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
6368 #if VMA_STATS_STRING_ENABLED
6369 m_CreationFrameIndex = currentFrameIndex;
6370 m_BufferImageUsage = 0;
6376 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
6379 VMA_ASSERT(m_pUserData == VMA_NULL);
6382 void InitBlockAllocation(
6383 VmaDeviceMemoryBlock* block,
6384 VkDeviceSize offset,
6385 VkDeviceSize alignment,
6387 uint32_t memoryTypeIndex,
6388 VmaSuballocationType suballocationType,
6392 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6393 VMA_ASSERT(block != VMA_NULL);
6394 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6395 m_Alignment = alignment;
6397 m_MemoryTypeIndex = memoryTypeIndex;
6398 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6399 m_SuballocationType = (uint8_t)suballocationType;
6400 m_BlockAllocation.m_Block = block;
6401 m_BlockAllocation.m_Offset = offset;
6402 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
6407 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6408 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
6409 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6410 m_MemoryTypeIndex = 0;
6411 m_BlockAllocation.m_Block = VMA_NULL;
6412 m_BlockAllocation.m_Offset = 0;
6413 m_BlockAllocation.m_CanBecomeLost =
true;
6416 void ChangeBlockAllocation(
6418 VmaDeviceMemoryBlock* block,
6419 VkDeviceSize offset);
6421 void ChangeOffset(VkDeviceSize newOffset);
6424 void InitDedicatedAllocation(
6425 uint32_t memoryTypeIndex,
6426 VkDeviceMemory hMemory,
6427 VmaSuballocationType suballocationType,
6431 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6432 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
6433 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
6436 m_MemoryTypeIndex = memoryTypeIndex;
6437 m_SuballocationType = (uint8_t)suballocationType;
6438 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6439 m_DedicatedAllocation.m_hMemory = hMemory;
6440 m_DedicatedAllocation.m_pMappedData = pMappedData;
6441 m_DedicatedAllocation.m_Prev = VMA_NULL;
6442 m_DedicatedAllocation.m_Next = VMA_NULL;
6445 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
6446 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
6447 VkDeviceSize GetSize()
const {
return m_Size; }
6448 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
6449 void* GetUserData()
const {
return m_pUserData; }
6450 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
6451 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
6453 VmaDeviceMemoryBlock* GetBlock()
const
6455 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6456 return m_BlockAllocation.m_Block;
6458 VkDeviceSize GetOffset()
const;
6459 VkDeviceMemory GetMemory()
const;
6460 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
6461 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
6462 void* GetMappedData()
const;
6463 bool CanBecomeLost()
const;
6465 uint32_t GetLastUseFrameIndex()
const
6467 return m_LastUseFrameIndex.load();
6469 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
6471 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
6481 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6483 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
6485 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
6496 void BlockAllocMap();
6497 void BlockAllocUnmap();
6498 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
6501 #if VMA_STATS_STRING_ENABLED
6502 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
6503 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
6505 void InitBufferImageUsage(uint32_t bufferImageUsage)
6507 VMA_ASSERT(m_BufferImageUsage == 0);
6508 m_BufferImageUsage = bufferImageUsage;
6511 void PrintParameters(
class VmaJsonWriter& json)
const;
6515 VkDeviceSize m_Alignment;
6516 VkDeviceSize m_Size;
6518 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
6519 uint32_t m_MemoryTypeIndex;
6521 uint8_t m_SuballocationType;
6528 struct BlockAllocation
6530 VmaDeviceMemoryBlock* m_Block;
6531 VkDeviceSize m_Offset;
6532 bool m_CanBecomeLost;
6536 struct DedicatedAllocation
6538 VkDeviceMemory m_hMemory;
6539 void* m_pMappedData;
6540 VmaAllocation_T* m_Prev;
6541 VmaAllocation_T* m_Next;
6547 BlockAllocation m_BlockAllocation;
6549 DedicatedAllocation m_DedicatedAllocation;
6552 #if VMA_STATS_STRING_ENABLED
6553 uint32_t m_CreationFrameIndex;
6554 uint32_t m_BufferImageUsage;
6559 friend struct VmaDedicatedAllocationListItemTraits;
6562 struct VmaDedicatedAllocationListItemTraits
6564 typedef VmaAllocation_T ItemType;
6565 static ItemType* GetPrev(
const ItemType* item)
6567 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6568 return item->m_DedicatedAllocation.m_Prev;
6570 static ItemType* GetNext(
const ItemType* item)
6572 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6573 return item->m_DedicatedAllocation.m_Next;
6575 static ItemType*& AccessPrev(ItemType* item)
6577 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6578 return item->m_DedicatedAllocation.m_Prev;
6580 static ItemType*& AccessNext(ItemType* item){
6581 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6582 return item->m_DedicatedAllocation.m_Next;
6590 struct VmaSuballocation
6592 VkDeviceSize offset;
6595 VmaSuballocationType type;
6599 struct VmaSuballocationOffsetLess
6601 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6603 return lhs.offset < rhs.offset;
6606 struct VmaSuballocationOffsetGreater
6608 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6610 return lhs.offset > rhs.offset;
6614 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
6617 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
6619 enum class VmaAllocationRequestType
6641 struct VmaAllocationRequest
6643 VkDeviceSize offset;
6644 VkDeviceSize sumFreeSize;
6645 VkDeviceSize sumItemSize;
6646 VmaSuballocationList::iterator item;
6647 size_t itemsToMakeLostCount;
6649 VmaAllocationRequestType type;
6651 VkDeviceSize CalcCost()
const
6653 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
6661 class VmaBlockMetadata
6665 virtual ~VmaBlockMetadata() { }
6666 virtual void Init(VkDeviceSize size) { m_Size = size; }
6669 virtual bool Validate()
const = 0;
6670 VkDeviceSize GetSize()
const {
return m_Size; }
6671 virtual size_t GetAllocationCount()
const = 0;
6672 virtual VkDeviceSize GetSumFreeSize()
const = 0;
6673 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
6675 virtual bool IsEmpty()
const = 0;
6677 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
6679 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
6681 #if VMA_STATS_STRING_ENABLED
6682 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
6688 virtual bool CreateAllocationRequest(
6689 uint32_t currentFrameIndex,
6690 uint32_t frameInUseCount,
6691 VkDeviceSize bufferImageGranularity,
6692 VkDeviceSize allocSize,
6693 VkDeviceSize allocAlignment,
6695 VmaSuballocationType allocType,
6696 bool canMakeOtherLost,
6699 VmaAllocationRequest* pAllocationRequest) = 0;
6701 virtual bool MakeRequestedAllocationsLost(
6702 uint32_t currentFrameIndex,
6703 uint32_t frameInUseCount,
6704 VmaAllocationRequest* pAllocationRequest) = 0;
6706 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
6708 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
6712 const VmaAllocationRequest& request,
6713 VmaSuballocationType type,
6714 VkDeviceSize allocSize,
6719 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
6722 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
6724 #if VMA_STATS_STRING_ENABLED
6725 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6726 VkDeviceSize unusedBytes,
6727 size_t allocationCount,
6728 size_t unusedRangeCount)
const;
6729 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6730 VkDeviceSize offset,
6732 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6733 VkDeviceSize offset,
6734 VkDeviceSize size)
const;
6735 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
6739 VkDeviceSize m_Size;
6740 const VkAllocationCallbacks* m_pAllocationCallbacks;
// Asserts `cond` and, in release validation paths, makes the enclosing
// Validate() function return false when the condition fails.
#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
6748 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
6750 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
6753 virtual ~VmaBlockMetadata_Generic();
6754 virtual void Init(VkDeviceSize size);
6756 virtual bool Validate()
const;
6757 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
6758 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
6759 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6760 virtual bool IsEmpty()
const;
6762 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6763 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6765 #if VMA_STATS_STRING_ENABLED
6766 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
6769 virtual bool CreateAllocationRequest(
6770 uint32_t currentFrameIndex,
6771 uint32_t frameInUseCount,
6772 VkDeviceSize bufferImageGranularity,
6773 VkDeviceSize allocSize,
6774 VkDeviceSize allocAlignment,
6776 VmaSuballocationType allocType,
6777 bool canMakeOtherLost,
6779 VmaAllocationRequest* pAllocationRequest);
6781 virtual bool MakeRequestedAllocationsLost(
6782 uint32_t currentFrameIndex,
6783 uint32_t frameInUseCount,
6784 VmaAllocationRequest* pAllocationRequest);
6786 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6788 virtual VkResult CheckCorruption(
const void* pBlockData);
6791 const VmaAllocationRequest& request,
6792 VmaSuballocationType type,
6793 VkDeviceSize allocSize,
6797 virtual void FreeAtOffset(VkDeviceSize offset);
6802 bool IsBufferImageGranularityConflictPossible(
6803 VkDeviceSize bufferImageGranularity,
6804 VmaSuballocationType& inOutPrevSuballocType)
const;
6807 friend class VmaDefragmentationAlgorithm_Generic;
6808 friend class VmaDefragmentationAlgorithm_Fast;
6810 uint32_t m_FreeCount;
6811 VkDeviceSize m_SumFreeSize;
6812 VmaSuballocationList m_Suballocations;
6815 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
6817 bool ValidateFreeSuballocationList()
const;
6821 bool CheckAllocation(
6822 uint32_t currentFrameIndex,
6823 uint32_t frameInUseCount,
6824 VkDeviceSize bufferImageGranularity,
6825 VkDeviceSize allocSize,
6826 VkDeviceSize allocAlignment,
6827 VmaSuballocationType allocType,
6828 VmaSuballocationList::const_iterator suballocItem,
6829 bool canMakeOtherLost,
6830 VkDeviceSize* pOffset,
6831 size_t* itemsToMakeLostCount,
6832 VkDeviceSize* pSumFreeSize,
6833 VkDeviceSize* pSumItemSize)
const;
6835 void MergeFreeWithNext(VmaSuballocationList::iterator item);
6839 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
6842 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
6845 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Block metadata that allocates linearly: suballocations live in two
// vectors whose 1st/2nd roles can swap, operating either as a ring buffer
// or a double-ended stack (see SECOND_VECTOR_MODE below).
// NOTE(review): this extraction is missing original lines (braces, access
// specifiers, and the class's closing lines); code lines are kept verbatim.
6926 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
6928 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
6931 virtual ~VmaBlockMetadata_Linear();
6932 virtual void Init(VkDeviceSize size);
6934 virtual bool Validate()
const;
6935 virtual size_t GetAllocationCount()
const;
6936 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
6937 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
// Empty means no live allocations at all.
6938 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
6940 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6941 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6943 #if VMA_STATS_STRING_ENABLED
6944 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Tries to find space for a new allocation; fills *pAllocationRequest
// on success.
6947 virtual bool CreateAllocationRequest(
6948 uint32_t currentFrameIndex,
6949 uint32_t frameInUseCount,
6950 VkDeviceSize bufferImageGranularity,
6951 VkDeviceSize allocSize,
6952 VkDeviceSize allocAlignment,
6954 VmaSuballocationType allocType,
6955 bool canMakeOtherLost,
6957 VmaAllocationRequest* pAllocationRequest);
6959 virtual bool MakeRequestedAllocationsLost(
6960 uint32_t currentFrameIndex,
6961 uint32_t frameInUseCount,
6962 VmaAllocationRequest* pAllocationRequest);
6964 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6966 virtual VkResult CheckCorruption(
const void* pBlockData);
// Fragment of an Alloc-style declaration (leading line not visible).
6969 const VmaAllocationRequest& request,
6970 VmaSuballocationType type,
6971 VkDeviceSize allocSize,
6975 virtual void FreeAtOffset(VkDeviceSize offset);
6985 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
// How the "2nd" vector is being used relative to the "1st".
6987 enum SECOND_VECTOR_MODE
6989 SECOND_VECTOR_EMPTY,
6994 SECOND_VECTOR_RING_BUFFER,
7000 SECOND_VECTOR_DOUBLE_STACK,
7003 VkDeviceSize m_SumFreeSize;
// Two physical vectors; m_1stVectorIndex selects which one currently
// plays the "1st" role (the accessors below hide the swap).
7004 SuballocationVectorType m_Suballocations0, m_Suballocations1;
7005 uint32_t m_1stVectorIndex;
7006 SECOND_VECTOR_MODE m_2ndVectorMode;
7008 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7009 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
7010 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7011 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Counts of null (freed-in-place) items kept in the vectors; used to
// decide when compaction is worthwhile.
7014 size_t m_1stNullItemsBeginCount;
7016 size_t m_1stNullItemsMiddleCount;
7018 size_t m_2ndNullItemsCount;
7020 bool ShouldCompact1st()
const;
7021 void CleanupAfterFree();
// Internal variants of CreateAllocationRequest for the lower / upper
// address strategies.
7023 bool CreateAllocationRequest_LowerAddress(
7024 uint32_t currentFrameIndex,
7025 uint32_t frameInUseCount,
7026 VkDeviceSize bufferImageGranularity,
7027 VkDeviceSize allocSize,
7028 VkDeviceSize allocAlignment,
7029 VmaSuballocationType allocType,
7030 bool canMakeOtherLost,
7032 VmaAllocationRequest* pAllocationRequest);
7033 bool CreateAllocationRequest_UpperAddress(
7034 uint32_t currentFrameIndex,
7035 uint32_t frameInUseCount,
7036 VkDeviceSize bufferImageGranularity,
7037 VkDeviceSize allocSize,
7038 VkDeviceSize allocAlignment,
7039 VmaSuballocationType allocType,
7040 bool canMakeOtherLost,
7042 VmaAllocationRequest* pAllocationRequest);
// Block metadata using a buddy allocator: the block is split into a binary
// tree of nodes whose sizes halve per level (see LevelToNodeSize), with a
// per-level free list. NOTE(review): original lines (braces, access
// specifiers, the Node definition's body) are missing from this extraction;
// code lines are kept verbatim.
7056 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
7058 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
7061 virtual ~VmaBlockMetadata_Buddy();
7062 virtual void Init(VkDeviceSize size);
7064 virtual bool Validate()
const;
7065 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
// Total free size includes the tail of the block that the buddy scheme
// cannot use (size not a power of two) — see GetUnusableSize().
7066 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
7067 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
// Empty exactly when the root node is one big free node.
7068 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
7070 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
7071 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
7073 #if VMA_STATS_STRING_ENABLED
7074 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
7077 virtual bool CreateAllocationRequest(
7078 uint32_t currentFrameIndex,
7079 uint32_t frameInUseCount,
7080 VkDeviceSize bufferImageGranularity,
7081 VkDeviceSize allocSize,
7082 VkDeviceSize allocAlignment,
7084 VmaSuballocationType allocType,
7085 bool canMakeOtherLost,
7087 VmaAllocationRequest* pAllocationRequest);
7089 virtual bool MakeRequestedAllocationsLost(
7090 uint32_t currentFrameIndex,
7091 uint32_t frameInUseCount,
7092 VmaAllocationRequest* pAllocationRequest);
7094 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Corruption detection is not supported by this metadata type.
7096 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
// Fragment of an Alloc-style declaration (leading line not visible).
7099 const VmaAllocationRequest& request,
7100 VmaSuballocationType type,
7101 VkDeviceSize allocSize,
// Both Free overloads route to the private FreeAtOffset(alloc, offset).
7104 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
7105 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
// Buddy-tree tuning constants: smallest node the tree will split down to,
// and the maximum tree depth.
7108 static const VkDeviceSize MIN_NODE_SIZE = 32;
7109 static const size_t MAX_LEVELS = 30;
// Accumulators used by Validate() to cross-check the cached counters.
7111 struct ValidationContext
7113 size_t calculatedAllocationCount;
7114 size_t calculatedFreeCount;
7115 VkDeviceSize calculatedSumFreeSize;
7117 ValidationContext() :
7118 calculatedAllocationCount(0),
7119 calculatedFreeCount(0),
7120 calculatedSumFreeSize(0) { }
// Fragment of the Node struct (its surrounding lines are not visible).
7125 VkDeviceSize offset;
7155 VkDeviceSize m_UsableSize;
7156 uint32_t m_LevelCount;
// Per-level free list of nodes (array indexed by level).
7162 } m_FreeList[MAX_LEVELS];
7164 size_t m_AllocationCount;
7168 VkDeviceSize m_SumFreeSize;
// Bytes at the end of the block that cannot be used by the buddy scheme.
7170 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
7171 void DeleteNode(Node* node);
7172 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
7173 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
// Node size halves with each deeper level.
7174 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
7176 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
7177 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
7181 void AddToFreeListFront(uint32_t level, Node* node);
7185 void RemoveFromFreeList(uint32_t level, Node* node);
7187 #if VMA_STATS_STRING_ENABLED
7188 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// Wraps a single VkDeviceMemory object together with the metadata object
// that subdivides it, plus reference-counted CPU mapping state.
// NOTE(review): original lines (constructor, braces, access specifiers)
// are missing from this extraction; code lines are kept verbatim.
7198 class VmaDeviceMemoryBlock
7200 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
7202 VmaBlockMetadata* m_pMetadata;
// Destructor asserts the block was unmapped and its memory already
// released before destruction.
7206 ~VmaDeviceMemoryBlock()
7208 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
7209 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Fragment of an Init-style declaration (leading line not visible).
7216 uint32_t newMemoryTypeIndex,
7217 VkDeviceMemory newMemory,
7218 VkDeviceSize newSize,
7220 uint32_t algorithm);
7224 VmaPool GetParentPool()
const {
return m_hParentPool; }
7225 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
7226 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7227 uint32_t GetId()
const {
return m_Id; }
7228 void* GetMappedData()
const {
return m_pMappedData; }
7231 bool Validate()
const;
// Maps the memory `count` times (reference counted via m_MapCount).
7236 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Write/verify debug magic values around an allocation, for corruption
// detection.
7239 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
7240 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
// Fragments of bind declarations (some parameter lines not visible).
7242 VkResult BindBufferMemory(
7245 VkDeviceSize allocationLocalOffset,
7248 VkResult BindImageMemory(
7251 VkDeviceSize allocationLocalOffset,
7257 uint32_t m_MemoryTypeIndex;
7259 VkDeviceMemory m_hMemory;
// Map reference count and the cached mapped pointer.
7267 uint32_t m_MapCount;
7268 void* m_pMappedData;
// Describes one planned defragmentation move: which allocation region goes
// from (srcBlockIndex, srcOffset) to (dstBlockIndex, dstOffset), with direct
// block pointers cached for convenience. NOTE(review): some original lines
// of this struct are missing from this extraction.
7271 struct VmaDefragmentationMove
7273 size_t srcBlockIndex;
7274 size_t dstBlockIndex;
7275 VkDeviceSize srcOffset;
7276 VkDeviceSize dstOffset;
7279 VmaDeviceMemoryBlock* pSrcBlock;
7280 VmaDeviceMemoryBlock* pDstBlock;
7283 class VmaDefragmentationAlgorithm;
// A growable sequence of VmaDeviceMemoryBlock objects for one memory type —
// the core container behind both default pools and custom VmaPool objects.
// Handles allocation across blocks, block creation/destruction, statistics,
// and defragmentation. NOTE(review): original lines (constructor head,
// access specifiers, some parameter lines) are missing; code kept verbatim.
7291 struct VmaBlockVector
7293 VMA_CLASS_NO_COPY(VmaBlockVector)
// Fragment of the constructor parameter list (leading line not visible).
7298 uint32_t memoryTypeIndex,
7299 VkDeviceSize preferredBlockSize,
7300 size_t minBlockCount,
7301 size_t maxBlockCount,
7302 VkDeviceSize bufferImageGranularity,
7303 uint32_t frameInUseCount,
7304 bool explicitBlockSize,
// Pre-creates m_MinBlockCount empty blocks.
7309 VkResult CreateMinBlocks();
7311 VmaAllocator GetAllocator()
const {
return m_hAllocator; }
7312 VmaPool GetParentPool()
const {
return m_hParentPool; }
// A null parent pool means this vector backs a default (non-custom) pool.
7313 bool IsCustomPool()
const {
return m_hParentPool != VMA_NULL; }
7314 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7315 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
7316 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
7317 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
7318 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
7323 bool IsCorruptionDetectionEnabled()
const;
// Fragment of an Allocate declaration (some parameter lines not visible).
7326 uint32_t currentFrameIndex,
7328 VkDeviceSize alignment,
7330 VmaSuballocationType suballocType,
7331 size_t allocationCount,
7339 #if VMA_STATS_STRING_ENABLED
7340 void PrintDetailedMap(
class VmaJsonWriter& json);
7343 void MakePoolAllocationsLost(
7344 uint32_t currentFrameIndex,
7345 size_t* pLostAllocationCount);
7346 VkResult CheckCorruption();
// Defragmentation entry points: plan/execute moves within CPU and GPU
// budgets, then process and commit them incrementally.
7350 class VmaBlockVectorDefragmentationContext* pCtx,
7352 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
7353 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
7354 VkCommandBuffer commandBuffer);
7355 void DefragmentationEnd(
7356 class VmaBlockVectorDefragmentationContext* pCtx,
7360 uint32_t ProcessDefragmentations(
7361 class VmaBlockVectorDefragmentationContext *pCtx,
7364 void CommitDefragmentations(
7365 class VmaBlockVectorDefragmentationContext *pCtx,
7371 size_t GetBlockCount()
const {
return m_Blocks.size(); }
7372 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
7373 size_t CalcAllocationCount()
const;
7374 bool IsBufferImageGranularityConflictPossible()
const;
7377 friend class VmaDefragmentationAlgorithm_Generic;
// Immutable configuration fixed at construction.
7381 const uint32_t m_MemoryTypeIndex;
7382 const VkDeviceSize m_PreferredBlockSize;
7383 const size_t m_MinBlockCount;
7384 const size_t m_MaxBlockCount;
7385 const VkDeviceSize m_BufferImageGranularity;
7386 const uint32_t m_FrameInUseCount;
7387 const bool m_ExplicitBlockSize;
7388 const uint32_t m_Algorithm;
7389 const float m_Priority;
// Guards m_Blocks and related mutable state.
7390 VMA_RW_MUTEX m_Mutex;
7394 bool m_HasEmptyBlock;
7396 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
7397 uint32_t m_NextBlockId;
7399 VkDeviceSize CalcMaxBlockSize()
const;
7402 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps blocks roughly sorted without a full sort on every change.
7406 void IncrementallySortBlocks();
// Fragments of AllocatePage / AllocateFromBlock declarations (some
// parameter lines not visible).
7408 VkResult AllocatePage(
7409 uint32_t currentFrameIndex,
7411 VkDeviceSize alignment,
7413 VmaSuballocationType suballocType,
7417 VkResult AllocateFromBlock(
7418 VmaDeviceMemoryBlock* pBlock,
7419 uint32_t currentFrameIndex,
7421 VkDeviceSize alignment,
7424 VmaSuballocationType suballocType,
7428 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Apply planned moves either via CPU memcpy (mapped memory) or via
// vkCmdCopyBuffer recorded into the given command buffer.
7431 void ApplyDefragmentationMovesCpu(
7432 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7433 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
7435 void ApplyDefragmentationMovesGpu(
7436 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7437 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7438 VkCommandBuffer commandBuffer);
7446 void UpdateHasEmptyBlock();
// NOTE(review): interior fragment of VmaPool_T (the class header is outside
// this extraction). A custom pool owns its own VmaBlockVector plus an id,
// optional name, and intrusive-list links used by the allocator's pool list.
7451 VMA_CLASS_NO_COPY(VmaPool_T)
7453 VmaBlockVector m_BlockVector;
// Fragment of the constructor parameter list (leading lines not visible).
7458 VkDeviceSize preferredBlockSize);
// Id is assigned exactly once after creation (asserted in SetId).
7461 uint32_t GetId()
const {
return m_Id; }
7462 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
7464 const char* GetName()
const {
return m_Name; }
7465 void SetName(
const char* pName);
7467 #if VMA_STATS_STRING_ENABLED
// Intrusive doubly-linked-list pointers, accessed via VmaPoolListItemTraits.
7474 VmaPool_T* m_PrevPool = VMA_NULL;
7475 VmaPool_T* m_NextPool = VMA_NULL;
7476 friend struct VmaPoolListItemTraits;
// Traits adapter that lets VmaIntrusiveLinkedList thread VmaPool_T objects
// through their m_PrevPool / m_NextPool members.
7479 struct VmaPoolListItemTraits
7481 typedef VmaPool_T ItemType;
7482 static ItemType* GetPrev(
const ItemType* item) {
return item->m_PrevPool; }
7483 static ItemType* GetNext(
const ItemType* item) {
return item->m_NextPool; }
7484 static ItemType*& AccessPrev(ItemType* item) {
return item->m_PrevPool; }
7485 static ItemType*& AccessNext(ItemType* item) {
return item->m_NextPool; }
// Abstract base class for defragmentation strategies operating on one
// VmaBlockVector: implementations collect candidate allocations via
// AddAllocation/AddAll, then produce a list of moves in Defragment().
// NOTE(review): original lines (braces, access specifiers, a trailing
// parameter of Defragment) are missing from this extraction.
7495 class VmaDefragmentationAlgorithm
7497 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
7499 VmaDefragmentationAlgorithm(
7501 VmaBlockVector* pBlockVector,
7502 uint32_t currentFrameIndex) :
7503 m_hAllocator(hAllocator),
7504 m_pBlockVector(pBlockVector),
7505 m_CurrentFrameIndex(currentFrameIndex)
7508 virtual ~VmaDefragmentationAlgorithm()
7512 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
7513 virtual void AddAll() = 0;
// Produces moves into `moves`, bounded by byte and allocation budgets.
7515 virtual VkResult Defragment(
7516 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7517 VkDeviceSize maxBytesToMove,
7518 uint32_t maxAllocationsToMove,
7521 virtual VkDeviceSize GetBytesMoved()
const = 0;
7522 virtual uint32_t GetAllocationsMoved()
const = 0;
7526 VmaBlockVector*
const m_pBlockVector;
7527 const uint32_t m_CurrentFrameIndex;
// Pairs a candidate allocation with the caller's optional "was moved"
// output flag.
7529 struct AllocationInfo
7532 VkBool32* m_pChanged;
// Default-constructed entries are null; the two initializer fragments
// below belong to the struct's constructors (their heads are missing).
7535 m_hAllocation(VK_NULL_HANDLE),
7536 m_pChanged(VMA_NULL)
7540 m_hAllocation(hAlloc),
7541 m_pChanged(pChanged)
// General-purpose defragmentation: gathers per-block allocation lists,
// sorts blocks/allocations by heuristics (size, offset, free space, presence
// of non-movable allocations) and moves allocations between blocks in
// rounds. NOTE(review): original lines (braces, access specifiers, some
// struct members) are missing from this extraction; code kept verbatim.
7547 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
7549 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
7551 VmaDefragmentationAlgorithm_Generic(
7553 VmaBlockVector* pBlockVector,
7554 uint32_t currentFrameIndex,
7555 bool overlappingMoveSupported);
7556 virtual ~VmaDefragmentationAlgorithm_Generic();
7558 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7559 virtual void AddAll() { m_AllAllocations =
true; }
7561 virtual VkResult Defragment(
7562 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7563 VkDeviceSize maxBytesToMove,
7564 uint32_t maxAllocationsToMove,
7567 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7568 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7571 uint32_t m_AllocationCount;
7572 bool m_AllAllocations;
7574 VkDeviceSize m_BytesMoved;
7575 uint32_t m_AllocationsMoved;
// Comparator: larger allocations first.
7577 struct AllocationInfoSizeGreater
7579 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7581 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
// Comparator: higher offsets first.
7585 struct AllocationInfoOffsetGreater
7587 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7589 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
// Per-block working state: which allocations were registered for
// defragmentation and whether the block also holds non-movable ones.
7595 size_t m_OriginalBlockIndex;
7596 VmaDeviceMemoryBlock* m_pBlock;
7597 bool m_HasNonMovableAllocations;
7598 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
7600 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
7601 m_OriginalBlockIndex(SIZE_MAX),
7603 m_HasNonMovableAllocations(true),
7604 m_Allocations(pAllocationCallbacks)
// Block has non-movable allocations iff not all of its allocations were
// registered for defragmentation.
7608 void CalcHasNonMovableAllocations()
7610 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
7611 const size_t defragmentAllocCount = m_Allocations.size();
7612 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
7615 void SortAllocationsBySizeDescending()
7617 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
7620 void SortAllocationsByOffsetDescending()
7622 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
// Orders BlockInfo pointers by underlying block pointer, enabling binary
// search against either a block or another BlockInfo.
7626 struct BlockPointerLess
7628 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const
7630 return pLhsBlockInfo->m_pBlock < pRhsBlock;
7632 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7634 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Preference order for move destinations: movable-only blocks first, then
// by free space (comparison continues on lines missing from this view).
7640 struct BlockInfoCompareMoveDestination
7642 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7644 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
7648 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
7652 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
7660 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
7661 BlockInfoVector m_Blocks;
7663 VkResult DefragmentRound(
7664 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7665 VkDeviceSize maxBytesToMove,
7666 uint32_t maxAllocationsToMove,
7667 bool freeOldAllocations);
7669 size_t CalcBlocksWithNonMovableCount()
const;
// Heuristic to avoid moves that don't improve layout.
7671 static bool MoveMakesSense(
7672 size_t dstBlockIndex, VkDeviceSize dstOffset,
7673 size_t srcBlockIndex, VkDeviceSize srcOffset);
// Fast defragmentation: instead of tracking individual allocations it
// rewrites block metadata wholesale (Preprocess/PostprocessMetadata) and
// uses a small fixed-size FreeSpaceDatabase to find destinations.
// NOTE(review): original lines (braces, access specifiers, several struct
// members) are missing from this extraction; code kept verbatim.
7676 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
7678 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
7680 VmaDefragmentationAlgorithm_Fast(
7682 VmaBlockVector* pBlockVector,
7683 uint32_t currentFrameIndex,
7684 bool overlappingMoveSupported);
7685 virtual ~VmaDefragmentationAlgorithm_Fast();
// Individual allocations are not tracked; only their count is recorded.
7687 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
7688 virtual void AddAll() { m_AllAllocations =
true; }
7690 virtual VkResult Defragment(
7691 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7692 VkDeviceSize maxBytesToMove,
7693 uint32_t maxAllocationsToMove,
7696 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7697 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
// Fragment of a per-block info struct (surrounding lines not visible).
7702 size_t origBlockIndex;
// Tracks up to MAX_COUNT known free ranges across blocks; constructor
// fragment below initializes all slots to "unused" (SIZE_MAX).
7705 class FreeSpaceDatabase
7711 s.blockInfoIndex = SIZE_MAX;
7712 for(
size_t i = 0; i < MAX_COUNT; ++i)
7714 m_FreeSpaces[i] = s;
// Records a free range if it is large enough, replacing the smallest
// tracked range that is still smaller than this one.
7718 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
7720 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7726 size_t bestIndex = SIZE_MAX;
7727 for(
size_t i = 0; i < MAX_COUNT; ++i)
7730 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
7735 if(m_FreeSpaces[i].size < size &&
7736 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
7742 if(bestIndex != SIZE_MAX)
7744 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
7745 m_FreeSpaces[bestIndex].offset = offset;
7746 m_FreeSpaces[bestIndex].size = size;
// Finds a tracked range that fits (alignment, size); consumes it, keeping
// any sufficiently large remainder registered.
7750 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
7751 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
7753 size_t bestIndex = SIZE_MAX;
7754 VkDeviceSize bestFreeSpaceAfter = 0;
7755 for(
size_t i = 0; i < MAX_COUNT; ++i)
7758 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
7760 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
7762 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
7764 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
7766 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
7769 bestFreeSpaceAfter = freeSpaceAfter;
7775 if(bestIndex != SIZE_MAX)
7777 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
7778 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
// Remainder still big enough: shrink the slot in place...
7780 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7783 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
7784 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
7785 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
// ...otherwise release the slot entirely.
7790 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
7800 static const size_t MAX_COUNT = 4;
7804 size_t blockInfoIndex;
7805 VkDeviceSize offset;
7807 } m_FreeSpaces[MAX_COUNT];
7810 const bool m_OverlappingMoveSupported;
7812 uint32_t m_AllocationCount;
7813 bool m_AllAllocations;
7815 VkDeviceSize m_BytesMoved;
7816 uint32_t m_AllocationsMoved;
7818 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
// Bulk metadata rewriting around the move pass.
7820 void PreprocessMetadata();
7821 void PostprocessMetadata();
7822 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
// Per-block state used during defragmentation; only the flag enum's first
// value is visible in this extraction.
7825 struct VmaBlockDefragmentationContext
7829 BLOCK_FLAG_USED = 0x00000001,
// Defragmentation state scoped to a single VmaBlockVector: per-block
// contexts, the planned move list, and progress counters for incremental
// (pass-based) defragmentation. NOTE(review): original lines (braces,
// access specifiers, some members) are missing from this extraction.
7835 class VmaBlockVectorDefragmentationContext
7837 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
7841 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
7842 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
// Moves handed out to the user vs. moves actually committed.
7843 uint32_t defragmentationMovesProcessed;
7844 uint32_t defragmentationMovesCommitted;
7845 bool hasDefragmentationPlan;
7847 VmaBlockVectorDefragmentationContext(
7850 VmaBlockVector* pBlockVector,
7851 uint32_t currFrameIndex);
7852 ~VmaBlockVectorDefragmentationContext();
// Null custom pool means this context targets a default pool.
7854 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
7855 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
7856 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
7858 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7859 void AddAll() { m_AllAllocations =
true; }
7868 VmaBlockVector*
const m_pBlockVector;
7869 const uint32_t m_CurrFrameIndex;
// Owned algorithm instance; concrete type chosen elsewhere.
7871 VmaDefragmentationAlgorithm* m_pAlgorithm;
7879 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
7880 bool m_AllAllocations;
// Top-level defragmentation context returned to the user: aggregates one
// VmaBlockVectorDefragmentationContext per default memory type plus one per
// participating custom pool, with overall CPU/GPU budgets. NOTE(review):
// original lines (some parameters, access specifiers) are missing.
7883 struct VmaDefragmentationContext_T
7886 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
7888 VmaDefragmentationContext_T(
7890 uint32_t currFrameIndex,
7893 ~VmaDefragmentationContext_T();
// Registers pools / individual allocations to be defragmented.
7895 void AddPools(uint32_t poolCount,
const VmaPool* pPools);
7896 void AddAllocations(
7897 uint32_t allocationCount,
7899 VkBool32* pAllocationsChanged);
7907 VkResult Defragment(
7908 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
7909 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
7913 VkResult DefragmentPassEnd();
7917 const uint32_t m_CurrFrameIndex;
7918 const uint32_t m_Flags;
// Remaining budgets, decremented as moves are planned — TODO confirm
// against the implementation (not visible here).
7921 VkDeviceSize m_MaxCpuBytesToMove;
7922 uint32_t m_MaxCpuAllocationsToMove;
7923 VkDeviceSize m_MaxGpuBytesToMove;
7924 uint32_t m_MaxGpuAllocationsToMove;
// One slot per memory type for default pools; custom pools collected
// separately.
7927 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
7929 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
// NOTE(review): fragment of the call-recording facility, compiled only when
// VMA_RECORDING_ENABLED. The enclosing class header is outside this
// extraction; the visible members write a replayable log of allocator calls
// (one Record* method per public API entry point) to m_File.
7932 #if VMA_RECORDING_ENABLED
// Writes a header describing the device and enabled extensions so a replay
// can validate compatibility.
7939 void WriteConfiguration(
7940 const VkPhysicalDeviceProperties& devProps,
7941 const VkPhysicalDeviceMemoryProperties& memProps,
7942 uint32_t vulkanApiVersion,
7943 bool dedicatedAllocationExtensionEnabled,
7944 bool bindMemory2ExtensionEnabled,
7945 bool memoryBudgetExtensionEnabled,
7946 bool deviceCoherentMemoryExtensionEnabled);
// One Record* method per recorded allocator operation; several parameter
// lines are missing from this extraction.
7949 void RecordCreateAllocator(uint32_t frameIndex);
7950 void RecordDestroyAllocator(uint32_t frameIndex);
7951 void RecordCreatePool(uint32_t frameIndex,
7954 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
7955 void RecordAllocateMemory(uint32_t frameIndex,
7956 const VkMemoryRequirements& vkMemReq,
7959 void RecordAllocateMemoryPages(uint32_t frameIndex,
7960 const VkMemoryRequirements& vkMemReq,
7962 uint64_t allocationCount,
7964 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
7965 const VkMemoryRequirements& vkMemReq,
7966 bool requiresDedicatedAllocation,
7967 bool prefersDedicatedAllocation,
7970 void RecordAllocateMemoryForImage(uint32_t frameIndex,
7971 const VkMemoryRequirements& vkMemReq,
7972 bool requiresDedicatedAllocation,
7973 bool prefersDedicatedAllocation,
7976 void RecordFreeMemory(uint32_t frameIndex,
7978 void RecordFreeMemoryPages(uint32_t frameIndex,
7979 uint64_t allocationCount,
7981 void RecordSetAllocationUserData(uint32_t frameIndex,
7983 const void* pUserData);
7984 void RecordCreateLostAllocation(uint32_t frameIndex,
7986 void RecordMapMemory(uint32_t frameIndex,
7988 void RecordUnmapMemory(uint32_t frameIndex,
7990 void RecordFlushAllocation(uint32_t frameIndex,
7991 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
7992 void RecordInvalidateAllocation(uint32_t frameIndex,
7993 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
7994 void RecordCreateBuffer(uint32_t frameIndex,
7995 const VkBufferCreateInfo& bufCreateInfo,
7998 void RecordCreateImage(uint32_t frameIndex,
7999 const VkImageCreateInfo& imageCreateInfo,
8002 void RecordDestroyBuffer(uint32_t frameIndex,
8004 void RecordDestroyImage(uint32_t frameIndex,
8006 void RecordTouchAllocation(uint32_t frameIndex,
8008 void RecordGetAllocationInfo(uint32_t frameIndex,
8010 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
8012 void RecordDefragmentationBegin(uint32_t frameIndex,
8015 void RecordDefragmentationEnd(uint32_t frameIndex,
8017 void RecordSetPoolName(uint32_t frameIndex,
// Helper that formats user-data strings for the log.
8028 class UserDataString
8032 const char* GetString()
const {
return m_Str; }
// Serializes writes to the log file; start time anchors timestamps.
8042 VMA_MUTEX m_FileMutex;
8043 std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;
8045 void GetBasicParams(CallParams& outParams);
// Prints `count` items as space-separated %p values.
8048 template<
typename T>
8049 void PrintPointerList(uint64_t count,
const T* pItems)
8053 fprintf(m_File,
"%p", pItems[0]);
8054 for(uint64_t i = 1; i < count; ++i)
8056 fprintf(m_File,
" %p", pItems[i]);
// Overload for VmaAllocation handles.
8061 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
// Thin wrapper around a VmaPoolAllocator that constructs VmaAllocation_T
// objects (forwarding constructor arguments via the variadic Allocate).
// NOTE(review): braces/access specifiers and a Free method are not visible
// in this extraction.
8070 class VmaAllocationObjectAllocator
8072 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
8074 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
8076 template<
typename... Types>
VmaAllocation Allocate(Types... args);
8081 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
// Per-heap atomic byte counters for blocks and allocations, plus (when
// VMA_MEMORY_BUDGET is enabled) cached usage/budget figures fetched from
// VK_EXT_memory_budget and a counter of operations since the last fetch.
// NOTE(review): closing braces and #endif lines are missing from this
// extraction; code lines are kept verbatim.
8084 struct VmaCurrentBudgetData
8086 VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
8087 VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
8089 #if VMA_MEMORY_BUDGET
8090 VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
8091 VMA_RW_MUTEX m_BudgetMutex;
8092 uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
8093 uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
8094 uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
// Zero-initializes every per-heap counter.
8097 VmaCurrentBudgetData()
8099 for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
8101 m_BlockBytes[heapIndex] = 0;
8102 m_AllocationBytes[heapIndex] = 0;
8103 #if VMA_MEMORY_BUDGET
8104 m_VulkanUsage[heapIndex] = 0;
8105 m_VulkanBudget[heapIndex] = 0;
8106 m_BlockBytesAtBudgetFetch[heapIndex] = 0;
8110 #if VMA_MEMORY_BUDGET
8111 m_OperationsSinceBudgetFetch = 0;
// Accounting hooks called on every allocation / free; each bumps the
// fetch-staleness counter when budget support is compiled in.
8115 void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
8117 m_AllocationBytes[heapIndex] += allocationSize;
8118 #if VMA_MEMORY_BUDGET
8119 ++m_OperationsSinceBudgetFetch;
8123 void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
8125 VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
8126 m_AllocationBytes[heapIndex] -= allocationSize;
8127 #if VMA_MEMORY_BUDGET
8128 ++m_OperationsSinceBudgetFetch;
// The allocator object behind the public VmaAllocator handle: owns one
// VmaBlockVector per memory type, dedicated-allocation lists, the budget
// data, cached device properties, and all allocation / binding / flushing
// entry points used by the public API. NOTE(review): many original lines
// (constructor, braces, access specifiers, some parameters) are missing
// from this extraction; code lines are kept verbatim.
8134 struct VmaAllocator_T
8136 VMA_CLASS_NO_COPY(VmaAllocator_T)
// Feature/extension flags resolved at creation time.
8139 uint32_t m_VulkanApiVersion;
8140 bool m_UseKhrDedicatedAllocation;
8141 bool m_UseKhrBindMemory2;
8142 bool m_UseExtMemoryBudget;
8143 bool m_UseAmdDeviceCoherentMemory;
8144 bool m_UseKhrBufferDeviceAddress;
8145 bool m_UseExtMemoryPriority;
8147 VkInstance m_hInstance;
8148 bool m_AllocationCallbacksSpecified;
8149 VkAllocationCallbacks m_AllocationCallbacks;
8151 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
// Bitmask of heaps whose size the user limited at creation.
8154 uint32_t m_HeapSizeLimitMask;
8156 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
8157 VkPhysicalDeviceMemoryProperties m_MemProps;
// One block vector per memory type (default pools).
8160 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated allocations tracked per memory type, each list guarded by
// its own RW mutex.
8162 typedef VmaIntrusiveLinkedList<VmaDedicatedAllocationListItemTraits> DedicatedAllocationLinkedList;
8163 DedicatedAllocationLinkedList m_DedicatedAllocations[VK_MAX_MEMORY_TYPES];
8164 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
8166 VmaCurrentBudgetData m_Budget;
8167 VMA_ATOMIC_UINT32 m_DeviceMemoryCount;
// Returns user callbacks if provided, otherwise null (Vulkan default).
8173 const VkAllocationCallbacks* GetAllocationCallbacks()
const
8175 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
8179 return m_VulkanFunctions;
8182 VkPhysicalDevice GetPhysicalDevice()
const {
return m_PhysicalDevice; }
// Effective granularity: device limit, raised to the debug minimum.
8184 VkDeviceSize GetBufferImageGranularity()
const
8187 static_cast<VkDeviceSize
>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
8188 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
8191 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
8192 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
8194 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const
8196 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
8197 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Non-coherent = HOST_VISIBLE set but HOST_COHERENT clear.
8200 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const
8202 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
8203 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Non-coherent types must also respect nonCoherentAtomSize alignment.
8206 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const
8208 return IsMemoryTypeNonCoherent(memTypeIndex) ?
8209 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
8210 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
8213 bool IsIntegratedGpu()
const
8215 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
8218 uint32_t GetGlobalMemoryTypeBits()
const {
return m_GlobalMemoryTypeBits; }
8220 #if VMA_RECORDING_ENABLED
8221 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
// Queries memory requirements, reporting whether a dedicated allocation
// is required or merely preferred (some parameter lines not visible).
8224 void GetBufferMemoryRequirements(
8226 VkMemoryRequirements& memReq,
8227 bool& requiresDedicatedAllocation,
8228 bool& prefersDedicatedAllocation)
const;
8229 void GetImageMemoryRequirements(
8231 VkMemoryRequirements& memReq,
8232 bool& requiresDedicatedAllocation,
8233 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point (can produce multiple allocations).
8236 VkResult AllocateMemory(
8237 const VkMemoryRequirements& vkMemReq,
8238 bool requiresDedicatedAllocation,
8239 bool prefersDedicatedAllocation,
8240 VkBuffer dedicatedBuffer,
8241 VkBufferUsageFlags dedicatedBufferUsage,
8242 VkImage dedicatedImage,
8244 VmaSuballocationType suballocType,
8245 size_t allocationCount,
// Fragment of a FreeMemory-style declaration (leading line not visible).
8250 size_t allocationCount,
8253 void CalculateStats(
VmaStats* pStats);
8256 VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);
8258 #if VMA_STATS_STRING_ENABLED
8259 void PrintDetailedMap(
class VmaJsonWriter& json);
// Defragmentation lifecycle (parameter lines not visible here).
8262 VkResult DefragmentationBegin(
8266 VkResult DefragmentationEnd(
8269 VkResult DefragmentationPassBegin(
8272 VkResult DefragmentationPassEnd(
8279 void DestroyPool(
VmaPool pool);
8282 void SetCurrentFrameIndex(uint32_t frameIndex);
8283 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
8285 void MakePoolAllocationsLost(
8287 size_t* pLostAllocationCount);
8288 VkResult CheckPoolCorruption(
VmaPool hPool);
8289 VkResult CheckCorruption(uint32_t memoryTypeBits);
// Raw vkAllocateMemory/vkFreeMemory wrappers with budget accounting —
// TODO confirm accounting details against implementation (not visible).
8294 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
8296 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
// Low-level bind helpers (some parameter lines not visible).
8298 VkResult BindVulkanBuffer(
8299 VkDeviceMemory memory,
8300 VkDeviceSize memoryOffset,
8304 VkResult BindVulkanImage(
8305 VkDeviceMemory memory,
8306 VkDeviceSize memoryOffset,
8313 VkResult BindBufferMemory(
8315 VkDeviceSize allocationLocalOffset,
8318 VkResult BindImageMemory(
8320 VkDeviceSize allocationLocalOffset,
// Flush/invalidate for one or many allocations, selected by `op`.
8324 VkResult FlushOrInvalidateAllocation(
8326 VkDeviceSize offset, VkDeviceSize size,
8327 VMA_CACHE_OPERATION op);
8328 VkResult FlushOrInvalidateAllocations(
8329 uint32_t allocationCount,
8331 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
8332 VMA_CACHE_OPERATION op);
// Fills allocation memory with a debug byte pattern.
8334 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
8340 uint32_t GetGpuDefragmentationMemoryTypeBits();
8343 VkDeviceSize m_PreferredLargeHeapBlockSize;
8345 VkPhysicalDevice m_PhysicalDevice;
8346 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
8347 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
// Custom pools list and its guard.
8349 VMA_RW_MUTEX m_PoolsMutex;
8350 typedef VmaIntrusiveLinkedList<VmaPoolListItemTraits> PoolList;
8353 uint32_t m_NextPoolId;
8358 uint32_t m_GlobalMemoryTypeBits;
8360 #if VMA_RECORDING_ENABLED
8361 VmaRecorder* m_pRecorder;
// Vulkan function pointers loaded statically or dynamically depending on
// configuration macros.
8366 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
8367 void ImportVulkanFunctions_Static();
8372 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
8373 void ImportVulkanFunctions_Dynamic();
8376 void ValidateVulkanFunctions();
8378 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Internal allocation paths: per-memory-type, then dedicated fallbacks
// (several parameter lines not visible).
8380 VkResult AllocateMemoryOfType(
8382 VkDeviceSize alignment,
8383 bool dedicatedAllocation,
8384 VkBuffer dedicatedBuffer,
8385 VkBufferUsageFlags dedicatedBufferUsage,
8386 VkImage dedicatedImage,
8388 uint32_t memTypeIndex,
8389 VmaSuballocationType suballocType,
8390 size_t allocationCount,
8394 VkResult AllocateDedicatedMemoryPage(
8396 VmaSuballocationType suballocType,
8397 uint32_t memTypeIndex,
8398 const VkMemoryAllocateInfo& allocInfo,
8400 bool isUserDataString,
8405 VkResult AllocateDedicatedMemory(
8407 VmaSuballocationType suballocType,
8408 uint32_t memTypeIndex,
8411 bool isUserDataString,
8414 VkBuffer dedicatedBuffer,
8415 VkBufferUsageFlags dedicatedBufferUsage,
8416 VkImage dedicatedImage,
8417 size_t allocationCount,
8426 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
8428 uint32_t CalculateGlobalMemoryTypeBits()
const;
8430 bool GetFlushOrInvalidateRange(
8432 VkDeviceSize offset, VkDeviceSize size,
8433 VkMappedMemoryRange& outRange)
const;
8435 #if VMA_MEMORY_BUDGET
// Re-fetches heap usage/budget via VK_EXT_memory_budget.
8436 void UpdateVulkanBudget();
// Allocates raw host memory through the allocator's VkAllocationCallbacks.
// NOTE(review): forwards to a VmaMalloc(const VkAllocationCallbacks*, ...) overload not visible in this chunk.
8443 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
8445 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
// Frees memory previously obtained from VmaMalloc(hAllocator, ...), using the same callbacks.
8448 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
8450 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation helper.
// NOTE(review): the function signature line is elided in this extract; presumably `static T* vma_new(VmaAllocator)` — confirm against full source.
8453 template<
typename T>
8456 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Allocates an uninitialized array of `count` objects of type T, aligned for T.
// NOTE(review): no overflow check on sizeof(T) * count is visible here.
8459 template<
typename T>
8460 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
8462 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroys and frees a single object allocated with vma_new.
// NOTE(review): lines elided here — the upstream code null-checks ptr and calls ptr->~T() before freeing; confirm against full source.
8465 template<
typename T>
8466 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
8471 VmaFree(hAllocator, ptr);
// Destroys `count` objects in reverse order, then frees the array storage.
// The reverse loop mirrors C++ array-destruction order.
8475 template<
typename T>
8476 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
8480 for(
size_t i = count; i--; )
// NOTE(review): the per-element destructor call inside the loop is elided in this extract.
8482 VmaFree(hAllocator, ptr);
8489 #if VMA_STATS_STRING_ENABLED
// Minimal append-only string builder backed by a VmaVector<char>,
// using the allocator's callbacks for all growth. The buffer is NOT
// NUL-terminated; consumers must pair GetData() with GetLength().
8491 class VmaStringBuilder
8494 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
8495 size_t GetLength()
const {
return m_Data.size(); }
8496 const char* GetData()
const {
return m_Data.data(); }
8498 void Add(
char ch) { m_Data.push_back(ch); }
8499 void Add(
const char* pStr);
8500 void AddNewLine() { Add(
'\n'); }
8501 void AddNumber(uint32_t num);
8502 void AddNumber(uint64_t num);
8503 void AddPointer(
const void* ptr);
// Character storage; grows via the allocator-aware vector.
8506 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a C string (without its terminator) to the buffer.
// NOTE(review): a guard line is elided here — upstream skips the copy when strLen == 0.
8509 void VmaStringBuilder::Add(
const char* pStr)
8511 const size_t strLen = strlen(pStr);
8514 const size_t oldCount = m_Data.size();
8515 m_Data.resize(oldCount + strLen);
8516 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Appends the decimal representation of a 32-bit number.
// NOTE(review): the local buffer, the digit loop, and the final Add() are elided in this extract;
// only the digit-emission statement survives.
8520 void VmaStringBuilder::AddNumber(uint32_t num)
8527 *--p =
'0' + (num % 10);
// Appends the decimal representation of a 64-bit number.
// NOTE(review): surrounding buffer/loop lines elided; same digit-by-digit scheme as the uint32_t overload.
8534 void VmaStringBuilder::AddNumber(uint64_t num)
8541 *--p =
'0' + (num % 10);
// Appends a pointer formatted as text via VmaPtrToStr into a stack buffer.
// NOTE(review): the buffer declaration and the trailing Add(buf) are elided in this extract.
8548 void VmaStringBuilder::AddPointer(
const void* ptr)
8551 VmaPtrToStr(buf,
sizeof(buf), ptr);
8560 #if VMA_STATS_STRING_ENABLED
// Streaming JSON writer used for the allocator's detailed-map dumps.
// Maintains a stack of open objects/arrays to place commas, quotes and
// indentation correctly; output goes to an external VmaStringBuilder.
8564 VMA_CLASS_NO_COPY(VmaJsonWriter)
8566 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
8569 void BeginObject(
bool singleLine =
false);
8572 void BeginArray(
bool singleLine =
false);
// Writes a complete quoted string value (or key) in one call.
8575 void WriteString(
const char* pStr);
// Begin/Continue/End allow building one string value from multiple pieces.
8576 void BeginString(
const char* pStr = VMA_NULL);
8577 void ContinueString(
const char* pStr);
8578 void ContinueString(uint32_t n);
8579 void ContinueString(uint64_t n);
8580 void ContinueString_Pointer(
const void* ptr);
8581 void EndString(
const char* pStr = VMA_NULL);
8583 void WriteNumber(uint32_t n);
8584 void WriteNumber(uint64_t n);
8585 void WriteBool(
bool b);
// Per-level indentation unit; defined out of line below.
8589 static const char*
const INDENT;
8591 enum COLLECTION_TYPE
8593 COLLECTION_TYPE_OBJECT,
8594 COLLECTION_TYPE_ARRAY,
// One stack entry per currently-open collection.
8598 COLLECTION_TYPE type;
// Counts values written so far; in objects, even indices are keys.
8599 uint32_t valueCount;
8600 bool singleLineMode;
8603 VmaStringBuilder& m_SB;
8604 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
// True between BeginString and EndString; most writes assert on it.
8605 bool m_InsideString;
8607 void BeginValue(
bool isString);
8608 void WriteIndent(
bool oneLess =
false);
// Two spaces per nesting level in pretty-printed output.
8611 const char*
const VmaJsonWriter::INDENT =
"  ";
// Binds the writer to an external string builder; the stack uses the
// caller-provided allocation callbacks.
8613 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
8615 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
8616 m_InsideString(false)
// Destructor only validates balanced usage: no string left open,
// every BeginObject/BeginArray matched by its End counterpart.
8620 VmaJsonWriter::~VmaJsonWriter()
8622 VMA_ASSERT(!m_InsideString);
8623 VMA_ASSERT(m_Stack.empty());
// Opens a JSON object and pushes a stack entry tracking its value count.
// NOTE(review): the lines emitting '{' via BeginValue/m_SB are elided in this extract.
8626 void VmaJsonWriter::BeginObject(
bool singleLine)
8628 VMA_ASSERT(!m_InsideString);
8634 item.type = COLLECTION_TYPE_OBJECT;
8635 item.valueCount = 0;
8636 item.singleLineMode = singleLine;
8637 m_Stack.push_back(item);
// Closes the innermost object; asserts the stack top actually is an object.
// NOTE(review): the '}' emission and pop_back are elided in this extract.
8640 void VmaJsonWriter::EndObject()
8642 VMA_ASSERT(!m_InsideString);
8647 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens a JSON array; mirrors BeginObject with COLLECTION_TYPE_ARRAY.
8651 void VmaJsonWriter::BeginArray(
bool singleLine)
8653 VMA_ASSERT(!m_InsideString);
8659 item.type = COLLECTION_TYPE_ARRAY;
8660 item.valueCount = 0;
8661 item.singleLineMode = singleLine;
8662 m_Stack.push_back(item);
// Closes the innermost array; asserts the stack top actually is an array.
8665 void VmaJsonWriter::EndArray()
8667 VMA_ASSERT(!m_InsideString);
8672 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: writes a full quoted string in one call.
// NOTE(review): body elided in this extract — presumably BeginString(pStr); EndString(); confirm against full source.
8676 void VmaJsonWriter::WriteString(
const char* pStr)
// Opens a string value (emits the opening quote via elided lines) and
// optionally writes an initial fragment.
8682 void VmaJsonWriter::BeginString(
const char* pStr)
8684 VMA_ASSERT(!m_InsideString);
8688 m_InsideString =
true;
8689 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8691 ContinueString(pStr);
// Appends a fragment to the currently-open string, escaping per JSON rules.
// NOTE(review): the per-character switch (escaping ", \, \n, \t, ...) spanning
// original lines 8701-8732 is elided in this extract; only the loop header and
// the unsupported-character assert remain visible.
8695 void VmaJsonWriter::ContinueString(
const char* pStr)
8697 VMA_ASSERT(m_InsideString);
8699 const size_t strLen = strlen(pStr);
8700 for(
size_t i = 0; i < strLen; ++i)
8733 VMA_ASSERT(0 &&
"Character not currently supported.");
// Appends a decimal uint32_t to the open string (AddNumber call elided in this extract).
8739 void VmaJsonWriter::ContinueString(uint32_t n)
8741 VMA_ASSERT(m_InsideString);
// Appends a decimal uint64_t to the open string (AddNumber call elided in this extract).
8745 void VmaJsonWriter::ContinueString(uint64_t n)
8747 VMA_ASSERT(m_InsideString);
// Appends a formatted pointer to the currently-open string.
8751 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
8753 VMA_ASSERT(m_InsideString);
8754 m_SB.AddPointer(ptr);
// Writes an optional final fragment, then closes the string
// (closing-quote emission elided in this extract) and clears the in-string flag.
8757 void VmaJsonWriter::EndString(
const char* pStr)
8759 VMA_ASSERT(m_InsideString);
8760 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8762 ContinueString(pStr);
8765 m_InsideString =
false;
// Writes a bare numeric value (BeginValue + AddNumber lines elided in this extract).
8768 void VmaJsonWriter::WriteNumber(uint32_t n)
8770 VMA_ASSERT(!m_InsideString);
// 64-bit counterpart of WriteNumber(uint32_t); body lines elided in this extract.
8775 void VmaJsonWriter::WriteNumber(uint64_t n)
8777 VMA_ASSERT(!m_InsideString);
// Writes the unquoted JSON literal true/false.
8782 void VmaJsonWriter::WriteBool(
bool b)
8784 VMA_ASSERT(!m_InsideString);
8786 m_SB.Add(b ?
"true" :
"false");
// Writes the JSON literal null (emission lines elided in this extract).
8789 void VmaJsonWriter::WriteNull()
8791 VMA_ASSERT(!m_InsideString);
// Shared prologue for every value written into an open collection:
// decides whether to emit ',' / ':' / indentation based on the top
// stack item, then bumps its value counter.
8796 void VmaJsonWriter::BeginValue(
bool isString)
8798 if(!m_Stack.empty())
8800 StackItem& currItem = m_Stack.back();
// Inside an object, even-numbered positions are keys — keys must be strings.
8801 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8802 currItem.valueCount % 2 == 0)
8804 VMA_ASSERT(isString);
// Odd position inside an object: this value follows its key (':' emission elided).
8807 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8808 currItem.valueCount % 2 != 0)
// Subsequent array element / object key: separator handling (elided).
8812 else if(currItem.valueCount > 0)
8821 ++currItem.valueCount;
// Emits a newline plus one INDENT per open multi-line collection;
// `oneLess` is used when closing a collection so the bracket dedents.
8825 void VmaJsonWriter::WriteIndent(
bool oneLess)
8827 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
8831 size_t count = m_Stack.size();
8832 if(count > 0 && oneLess)
8836 for(
size_t i = 0; i < count; ++i)
// Attaches user data to the allocation. For string-mode allocations the
// previous copy is freed and the new string is duplicated with the
// allocator's callbacks; otherwise the raw pointer is stored as-is.
8847 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
8849 if(IsUserDataString())
// Setting the exact same string pointer again would be a use-after-free below.
8851 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
8853 FreeUserDataString(hAllocator);
8855 if(pUserData != VMA_NULL)
8857 m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (
const char*)pUserData);
8862 m_pUserData = pUserData;
// Moves a block-type allocation into a different VmaDeviceMemoryBlock at a
// new offset. If the allocation is persistently mapped, its map reference
// count is transferred from the old block to the new one.
8866 void VmaAllocation_T::ChangeBlockAllocation(
8868 VmaDeviceMemoryBlock* block,
8869 VkDeviceSize offset)
8871 VMA_ASSERT(block != VMA_NULL);
8872 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8875 if(block != m_BlockAllocation.m_Block)
// Strip the persistent-map flag; only the plain reference count moves.
8877 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
8878 if(IsPersistentMap())
8880 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
8881 block->Map(hAllocator, mapRefCount, VMA_NULL);
8884 m_BlockAllocation.m_Block = block;
8885 m_BlockAllocation.m_Offset = offset;
8888 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
8890 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8891 m_BlockAllocation.m_Offset = newOffset;
// Returns the allocation's byte offset: within its block for block-type
// allocations; dedicated allocations start at 0 (return elided in this extract).
8894 VkDeviceSize VmaAllocation_T::GetOffset()
const
8898 case ALLOCATION_TYPE_BLOCK:
8899 return m_BlockAllocation.m_Offset;
8900 case ALLOCATION_TYPE_DEDICATED:
// Returns the underlying VkDeviceMemory handle: the owning block's memory
// for block allocations, or the dedicated handle; VK_NULL_HANDLE on an
// unexpected type (default case elided in this extract).
8908 VkDeviceMemory VmaAllocation_T::GetMemory()
const
8912 case ALLOCATION_TYPE_BLOCK:
8913 return m_BlockAllocation.m_Block->GetDeviceMemory();
8914 case ALLOCATION_TYPE_DEDICATED:
8915 return m_DedicatedAllocation.m_hMemory;
8918 return VK_NULL_HANDLE;
// Returns the host pointer for this allocation when mapped, else
// (elided branches) VMA_NULL. Block allocations offset into the block's
// shared mapping; dedicated allocations store their own pointer.
8922 void* VmaAllocation_T::GetMappedData()
const
8926 case ALLOCATION_TYPE_BLOCK:
8929 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
8930 VMA_ASSERT(pBlockData != VMA_NULL);
8931 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
8938 case ALLOCATION_TYPE_DEDICATED:
// Mapped pointer and map count must agree for dedicated allocations.
8939 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
8940 return m_DedicatedAllocation.m_pMappedData;
// Whether this allocation participates in the lost-allocation mechanism.
// Only block allocations can; dedicated branch (returning false) elided here.
8947 bool VmaAllocation_T::CanBecomeLost()
const
8951 case ALLOCATION_TYPE_BLOCK:
8952 return m_BlockAllocation.m_CanBecomeLost;
8953 case ALLOCATION_TYPE_DEDICATED:
// Tries to atomically mark the allocation lost via CAS on the last-use
// frame index. Fails if already lost or still in use within
// `frameInUseCount` frames of `currentFrameIndex`.
// NOTE(review): the enclosing retry loop and the return statements are elided in this extract.
8961 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8963 VMA_ASSERT(CanBecomeLost());
8969 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
8972 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8977 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
8983 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
8993 #if VMA_STATS_STRING_ENABLED
// Human-readable names indexed by VmaSuballocationType for JSON dumps.
// NOTE(review): the array's string elements are elided in this extract.
8996 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Emits this allocation's properties as key/value pairs into an
// already-open JSON object: type, size, optional user data, frame
// indices, and buffer/image usage flags when recorded.
9005 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const
9007 json.WriteString(
"Type");
9008 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
9010 json.WriteString(
"Size");
9011 json.WriteNumber(m_Size);
9013 if(m_pUserData != VMA_NULL)
9015 json.WriteString(
"UserData");
9016 if(IsUserDataString())
// String-mode user data is written as a quoted string...
9018 json.WriteString((
const char*)m_pUserData);
// ...otherwise the raw pointer value is printed.
9023 json.ContinueString_Pointer(m_pUserData);
9028 json.WriteString(
"CreationFrameIndex");
9029 json.WriteNumber(m_CreationFrameIndex);
9031 json.WriteString(
"LastUseFrameIndex");
9032 json.WriteNumber(GetLastUseFrameIndex());
9034 if(m_BufferImageUsage != 0)
9036 json.WriteString(
"Usage");
9037 json.WriteNumber(m_BufferImageUsage);
// Releases the duplicated user-data string and nulls the pointer.
// Only valid when the allocation was created in user-data-string mode.
9043 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
9045 VMA_ASSERT(IsUserDataString());
9046 VmaFreeString(hAllocator->GetAllocationCallbacks(), (
char*)m_pUserData);
9047 m_pUserData = VMA_NULL;
// Increments the map reference count for a block allocation.
// The count lives in the low 7 bits of m_MapCount (0x7F cap);
// the increment itself is elided in this extract.
9050 void VmaAllocation_T::BlockAllocMap()
9052 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
9054 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
9060 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the map reference count for a block allocation;
// asserts on unbalanced unmap. The decrement itself is elided in this extract.
9064 void VmaAllocation_T::BlockAllocUnmap()
9066 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
9068 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
9074 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation. If already mapped, returns the cached
// pointer and bumps the reference count (cap 0x7F); otherwise calls
// vkMapMemory and caches the result on success.
9078 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
9080 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
9084 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
9086 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
9087 *ppData = m_DedicatedAllocation.m_pMappedData;
9093 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
9094 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: go through the dispatched Vulkan entry point.
9099 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9100 hAllocator->m_hDevice,
9101 m_DedicatedAllocation.m_hMemory,
9106 if(result == VK_SUCCESS)
9108 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation once its map reference count drops to
// zero (decrement elided in this extract); asserts on unbalanced unmap.
9115 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
9117 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
9119 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
9124 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
9125 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
9126 hAllocator->m_hDevice,
9127 m_DedicatedAllocation.m_hMemory);
9132 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
9136 #if VMA_STATS_STRING_ENABLED
// Serializes a VmaStatInfo into JSON: scalar counters first, then
// nested single-line {Min,Avg,Max} objects for allocation and unused
// range sizes. The WriteNumber calls for the stat fields are elided
// in this extract; only the key writes survive.
9138 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
9142 json.WriteString(
"Blocks");
9145 json.WriteString(
"Allocations");
9148 json.WriteString(
"UnusedRanges");
9151 json.WriteString(
"UsedBytes");
9154 json.WriteString(
"UnusedBytes");
9159 json.WriteString(
"AllocationSize");
9160 json.BeginObject(
true);
9161 json.WriteString(
"Min");
9163 json.WriteString(
"Avg");
9165 json.WriteString(
"Max");
9172 json.WriteString(
"UnusedRangeSize");
9173 json.BeginObject(
true);
9174 json.WriteString(
"Min");
9176 json.WriteString(
"Avg");
9178 json.WriteString(
"Max");
// Comparator for the size-sorted free-suballocation vector. Provides two
// overloads so VmaBinaryFindFirstNotLess can compare iterator/iterator
// and iterator/raw-size (heterogeneous lookup).
9188 struct VmaSuballocationItemSizeLess
9191 const VmaSuballocationList::iterator lhs,
9192 const VmaSuballocationList::iterator rhs)
const
9194 return lhs->size < rhs->size;
9197 const VmaSuballocationList::iterator lhs,
9198 VkDeviceSize rhsSize)
const
9200 return lhs->size < rhsSize;
// Base-class constructor: caches the allocation callbacks used by all
// metadata containers. (Size is set later via Init.)
9208 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
9210 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
9214 #if VMA_STATS_STRING_ENABLED
// Opens the per-block JSON object with summary counters and starts the
// "Suballocations" array that PrintDetailedMap_Allocation/_UnusedRange
// entries will populate.
9216 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
9217 VkDeviceSize unusedBytes,
9218 size_t allocationCount,
9219 size_t unusedRangeCount)
const
9223 json.WriteString(
"TotalBytes");
9224 json.WriteNumber(GetSize());
9226 json.WriteString(
"UnusedBytes");
9227 json.WriteNumber(unusedBytes);
9229 json.WriteString(
"Allocations");
9230 json.WriteNumber((uint64_t)allocationCount);
9232 json.WriteString(
"UnusedRanges");
9233 json.WriteNumber((uint64_t)unusedRangeCount);
9235 json.WriteString(
"Suballocations");
// Writes one occupied suballocation as a single-line JSON object:
// its offset plus the allocation's own parameters.
9239 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
9240 VkDeviceSize offset,
9243 json.BeginObject(
true);
9245 json.WriteString(
"Offset");
9246 json.WriteNumber(offset);
9248 hAllocation->PrintParameters(json);
// Writes one free range as a single-line JSON object with offset,
// the FREE type name, and its size.
9253 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
9254 VkDeviceSize offset,
9255 VkDeviceSize size)
const
9257 json.BeginObject(
true);
9259 json.WriteString(
"Offset");
9260 json.WriteNumber(offset);
9262 json.WriteString(
"Type");
9263 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
9265 json.WriteString(
"Size");
9266 json.WriteNumber(size);
// Closes the "Suballocations" array and the per-block object opened by
// PrintDetailedMap_Begin (EndArray/EndObject calls elided in this extract).
9271 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const
// Generic (free-list based) metadata: a list of suballocations in offset
// order plus a size-sorted vector of iterators to the larger free ranges.
9282 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
9283 VmaBlockMetadata(hAllocator),
9286 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9287 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
// Empty destructor (body elided in this extract); containers clean up themselves.
9291 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initializes the metadata for a block of `size` bytes: the whole block
// starts as one FREE suballocation, registered both in the offset-ordered
// list and in the size-sorted free list.
9295 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
9297 VmaBlockMetadata::Init(size);
9300 m_SumFreeSize = size;
9302 VmaSuballocation suballoc = {};
9303 suballoc.offset = 0;
9304 suballoc.size = size;
9305 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9306 suballoc.hAllocation = VK_NULL_HANDLE;
9308 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9309 m_Suballocations.push_back(suballoc);
9310 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
// NOTE(review): the decrement moving suballocItem back onto the pushed element is elided in this extract.
9312 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check (used by heavy asserts): walks the suballocation
// list verifying contiguity, no two adjacent free ranges, handle/type
// agreement, and debug-margin invariants; then verifies the size-sorted
// free list is complete and non-decreasing; finally cross-checks the
// cached aggregate counters.
9315 bool VmaBlockMetadata_Generic::Validate()
const
9317 VMA_VALIDATE(!m_Suballocations.empty());
// Running offset — must land exactly at each suballocation's start.
9320 VkDeviceSize calculatedOffset = 0;
9322 uint32_t calculatedFreeCount = 0;
9324 VkDeviceSize calculatedSumFreeSize = 0;
// Free ranges big enough to appear in m_FreeSuballocationsBySize.
9327 size_t freeSuballocationsToRegister = 0;
9329 bool prevFree =
false;
9331 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9332 suballocItem != m_Suballocations.cend();
9335 const VmaSuballocation& subAlloc = *suballocItem;
9338 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
9340 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two adjacent free ranges should have been merged.
9342 VMA_VALIDATE(!prevFree || !currFree);
9344 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
9348 calculatedSumFreeSize += subAlloc.size;
9349 ++calculatedFreeCount;
9350 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9352 ++freeSuballocationsToRegister;
9356 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
9360 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
9361 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
// With debug margin enabled, every allocation must follow a free range.
9364 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
9367 calculatedOffset += subAlloc.size;
9368 prevFree = currFree;
9373 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
9375 VkDeviceSize lastSize = 0;
9376 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
9378 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
9381 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
9383 VMA_VALIDATE(suballocItem->size >= lastSize);
9385 lastSize = suballocItem->size;
9389 VMA_VALIDATE(ValidateFreeSuballocationList());
9390 VMA_VALIDATE(calculatedOffset == GetSize());
9391 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
9392 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range: the back of the size-sorted free vector.
// Fallback (return 0 when empty) elided in this extract.
9397 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const
9399 if(!m_FreeSuballocationsBySize.empty())
9401 return m_FreeSuballocationsBySize.back()->size;
9409 bool VmaBlockMetadata_Generic::IsEmpty()
const
9411 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo by scanning all suballocations, classifying each as
// used or unused. The per-item accumulation statements are elided in this
// extract; only the loop skeleton survives.
9414 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
9418 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9430 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9431 suballocItem != m_Suballocations.cend();
9434 const VmaSuballocation& suballoc = *suballocItem;
9435 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into pool-wide statistics.
// The counter additions are elided in this extract; only the size
// accumulation survives.
9448 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const
9450 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9452 inoutStats.
size += GetSize();
9459 #if VMA_STATS_STRING_ENABLED
// JSON dump of the whole block: header counters (allocation count is the
// total minus free ranges), then one entry per suballocation in offset order.
9461 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const
9463 PrintDetailedMap_Begin(json,
9465 m_Suballocations.size() - (
size_t)m_FreeCount,
9469 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9470 suballocItem != m_Suballocations.cend();
9471 ++suballocItem, ++i)
9473 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9475 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
9479 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
9483 PrintDetailedMap_End(json);
// Core placement routine: tries to find a spot for `allocSize` bytes at
// `allocAlignment` inside this block. Without canMakeOtherLost it searches
// only free ranges — best-fit via binary search on the size-sorted vector,
// min-offset via a linear scan, or worst-fit by iterating the sorted vector
// backwards (strategy selection lines are partially elided in this extract).
// With canMakeOtherLost it additionally considers evicting lost-capable
// allocations, keeping the cheapest candidate by CalcCost().
9488 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
9489 uint32_t currentFrameIndex,
9490 uint32_t frameInUseCount,
9491 VkDeviceSize bufferImageGranularity,
9492 VkDeviceSize allocSize,
9493 VkDeviceSize allocAlignment,
9495 VmaSuballocationType allocType,
9496 bool canMakeOtherLost,
9498 VmaAllocationRequest* pAllocationRequest)
9500 VMA_ASSERT(allocSize > 0);
9501 VMA_ASSERT(!upperAddress);
9502 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9503 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9504 VMA_HEAVY_ASSERT(Validate());
9506 pAllocationRequest->type = VmaAllocationRequestType::Normal;
// Early out: without eviction, total free space must fit the request plus margins.
9509 if(canMakeOtherLost ==
false &&
9510 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
9516 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
9517 if(freeSuballocCount > 0)
// Best-fit: first free range not smaller than the padded request size.
9522 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
9523 m_FreeSuballocationsBySize.data(),
9524 m_FreeSuballocationsBySize.data() + freeSuballocCount,
9525 allocSize + 2 * VMA_DEBUG_MARGIN,
9526 VmaSuballocationItemSizeLess());
9527 size_t index = it - m_FreeSuballocationsBySize.data();
9528 for(; index < freeSuballocCount; ++index)
9533 bufferImageGranularity,
9537 m_FreeSuballocationsBySize[index],
9539 &pAllocationRequest->offset,
9540 &pAllocationRequest->itemsToMakeLostCount,
9541 &pAllocationRequest->sumFreeSize,
9542 &pAllocationRequest->sumItemSize))
9544 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Min-offset strategy: first acceptable free range in address order.
9549 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
9551 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9552 it != m_Suballocations.end();
9555 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
9558 bufferImageGranularity,
9564 &pAllocationRequest->offset,
9565 &pAllocationRequest->itemsToMakeLostCount,
9566 &pAllocationRequest->sumFreeSize,
9567 &pAllocationRequest->sumItemSize))
9569 pAllocationRequest->item = it;
// Worst-fit: walk the size-sorted vector from largest to smallest.
9577 for(
size_t index = freeSuballocCount; index--; )
9582 bufferImageGranularity,
9586 m_FreeSuballocationsBySize[index],
9588 &pAllocationRequest->offset,
9589 &pAllocationRequest->itemsToMakeLostCount,
9590 &pAllocationRequest->sumFreeSize,
9591 &pAllocationRequest->sumItemSize))
9593 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: brute-force every candidate start and keep the cheapest.
9600 if(canMakeOtherLost)
9605 VmaAllocationRequest tmpAllocRequest = {};
9606 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
9607 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
9608 suballocIt != m_Suballocations.end();
9611 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
9612 suballocIt->hAllocation->CanBecomeLost())
9617 bufferImageGranularity,
9623 &tmpAllocRequest.offset,
9624 &tmpAllocRequest.itemsToMakeLostCount,
9625 &tmpAllocRequest.sumFreeSize,
9626 &tmpAllocRequest.sumItemSize))
9630 *pAllocationRequest = tmpAllocRequest;
9631 pAllocationRequest->item = suballocIt;
9634 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
9636 *pAllocationRequest = tmpAllocRequest;
9637 pAllocationRequest->item = suballocIt;
// Evicts the allocations a CreateAllocationRequest marked for loss:
// walks forward from the request item, skipping already-free entries,
// and frees each lost-capable allocation until itemsToMakeLostCount
// reaches zero. Failure return path elided in this extract.
9650 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
9651 uint32_t currentFrameIndex,
9652 uint32_t frameInUseCount,
9653 VmaAllocationRequest* pAllocationRequest)
9655 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
9657 while(pAllocationRequest->itemsToMakeLostCount > 0)
9659 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
9661 ++pAllocationRequest->item;
9663 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9664 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
9665 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
9666 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; track the returned iterator.
9668 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
9669 --pAllocationRequest->itemsToMakeLostCount;
9677 VMA_HEAVY_ASSERT(Validate());
9678 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9679 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Force-evicts every lost-capable allocation in the block that is old
// enough; returns how many were made lost.
9684 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9686 uint32_t lostAllocationCount = 0;
9687 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9688 it != m_Suballocations.end();
9691 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
9692 it->hAllocation->CanBecomeLost() &&
9693 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9695 it = FreeSuballocation(it);
9696 ++lostAllocationCount;
9699 return lostAllocationCount;
// Verifies the magic-value guard bytes written in the debug margins
// before and after every occupied suballocation; returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first mismatch.
9702 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
9704 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9705 it != m_Suballocations.end();
9708 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
9710 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
9712 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9713 return VK_ERROR_VALIDATION_FAILED_EXT;
9715 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
9717 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9718 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: converts the chosen
// free suballocation into an occupied one, and splits off new FREE
// suballocations for any leading/trailing padding, keeping both the
// offset-ordered list and the size-sorted free vector consistent.
9726 void VmaBlockMetadata_Generic::Alloc(
9727 const VmaAllocationRequest& request,
9728 VmaSuballocationType type,
9729 VkDeviceSize allocSize,
9732 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
9733 VMA_ASSERT(request.item != m_Suballocations.end());
9734 VmaSuballocation& suballoc = *request.item;
9736 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9738 VMA_ASSERT(request.offset >= suballoc.offset);
9739 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
9740 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
9741 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The chosen range leaves the free list before being repurposed.
9745 UnregisterFreeSuballocation(request.item);
9747 suballoc.offset = request.offset;
9748 suballoc.size = allocSize;
9749 suballoc.type = type;
9750 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free suballocation after the item.
9755 VmaSuballocation paddingSuballoc = {};
9756 paddingSuballoc.offset = request.offset + allocSize;
9757 paddingSuballoc.size = paddingEnd;
9758 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9759 VmaSuballocationList::iterator next = request.item;
9761 const VmaSuballocationList::iterator paddingEndItem =
9762 m_Suballocations.insert(next, paddingSuballoc);
9763 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free suballocation before the item.
9769 VmaSuballocation paddingSuballoc = {};
9770 paddingSuballoc.offset = request.offset - paddingBegin;
9771 paddingSuballoc.size = paddingBegin;
9772 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9773 const VmaSuballocationList::iterator paddingBeginItem =
9774 m_Suballocations.insert(request.item, paddingSuballoc);
9775 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; padding increments (elided) re-add as needed.
9779 m_FreeCount = m_FreeCount - 1;
9780 if(paddingBegin > 0)
9788 m_SumFreeSize -= allocSize;
// Frees by handle: linear search over the suballocation list for the
// matching VmaAllocation, then FreeSuballocation (which merges neighbors).
// Asserts if the handle does not belong to this block.
9791 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
9793 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9794 suballocItem != m_Suballocations.end();
9797 VmaSuballocation& suballoc = *suballocItem;
9798 if(suballoc.hAllocation == allocation)
9800 FreeSuballocation(suballocItem);
9801 VMA_HEAVY_ASSERT(Validate());
9805 VMA_ASSERT(0 &&
"Not found!");
// Frees by offset: same linear search as Free(), keyed on suballoc.offset.
9808 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
9810 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9811 suballocItem != m_Suballocations.end();
9814 VmaSuballocation& suballoc = *suballocItem;
9815 if(suballoc.offset == offset)
9817 FreeSuballocation(suballocItem);
9821 VMA_ASSERT(0 &&
"Not found!");
// Checks the size-sorted free vector: every entry is FREE, at least the
// registration threshold, and sizes are non-decreasing.
9824 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const
9826 VkDeviceSize lastSize = 0;
9827 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
9829 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
9831 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
9832 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9833 VMA_VALIDATE(it->size >= lastSize);
9834 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment can start at
// (or after) `suballocItem`, honoring VMA_DEBUG_MARGIN and
// bufferImageGranularity. Two major paths: with canMakeOtherLost the
// candidate may span multiple successor items whose allocations would be
// evicted (counted in *itemsToMakeLostCount); without it the candidate
// must fit entirely inside one free suballocation. Outputs the final
// aligned *pOffset and free/evicted size sums. Several success/failure
// return statements are elided in this extract.
9839 bool VmaBlockMetadata_Generic::CheckAllocation(
9840 uint32_t currentFrameIndex,
9841 uint32_t frameInUseCount,
9842 VkDeviceSize bufferImageGranularity,
9843 VkDeviceSize allocSize,
9844 VkDeviceSize allocAlignment,
9845 VmaSuballocationType allocType,
9846 VmaSuballocationList::const_iterator suballocItem,
9847 bool canMakeOtherLost,
9848 VkDeviceSize* pOffset,
9849 size_t* itemsToMakeLostCount,
9850 VkDeviceSize* pSumFreeSize,
9851 VkDeviceSize* pSumItemSize)
const
9853 VMA_ASSERT(allocSize > 0);
9854 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9855 VMA_ASSERT(suballocItem != m_Suballocations.cend());
9856 VMA_ASSERT(pOffset != VMA_NULL);
9858 *itemsToMakeLostCount = 0;
// ---- Path 1: eviction allowed -------------------------------------
9862 if(canMakeOtherLost)
9864 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9866 *pSumFreeSize = suballocItem->size;
// Occupied start item: only usable if its allocation is evictable and stale.
9870 if(suballocItem->hAllocation->CanBecomeLost() &&
9871 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9873 ++*itemsToMakeLostCount;
9874 *pSumItemSize = suballocItem->size;
// Quick reject: not enough space from this offset to end of block.
9883 if(GetSize() - suballocItem->offset < allocSize)
9889 *pOffset = suballocItem->offset;
// Reserve the leading debug margin before aligning.
9892 if(VMA_DEBUG_MARGIN > 0)
9894 *pOffset += VMA_DEBUG_MARGIN;
9898 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Respect bufferImageGranularity against preceding suballocations.
9902 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
9904 bool bufferImageGranularityConflict =
false;
9905 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
9906 while(prevSuballocItem != m_Suballocations.cbegin())
9909 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
9910 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
9912 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9914 bufferImageGranularityConflict =
true;
9922 if(bufferImageGranularityConflict)
9924 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
// Alignment pushed the offset past this suballocation entirely — fail (elided).
9930 if(*pOffset >= suballocItem->offset + suballocItem->size)
9936 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
9939 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
9941 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
9943 if(suballocItem->offset + totalSize > GetSize())
// Walk successor items, accumulating free space and evictable allocations.
9950 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
9951 if(totalSize > suballocItem->size)
9953 VkDeviceSize remainingSize = totalSize - suballocItem->size;
9954 while(remainingSize > 0)
9957 if(lastSuballocItem == m_Suballocations.cend())
9961 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9963 *pSumFreeSize += lastSuballocItem->size;
9967 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
9968 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
9969 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9971 ++*itemsToMakeLostCount;
9972 *pSumItemSize += lastSuballocItem->size;
9979 remainingSize = (lastSuballocItem->size < remainingSize) ?
9980 remainingSize - lastSuballocItem->size : 0;
// Check following suballocations for granularity conflicts; they may also need eviction.
9986 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
9988 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
9990 while(nextSuballocItem != m_Suballocations.cend())
9992 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
9993 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9995 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9997 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
9998 if(nextSuballoc.hAllocation->CanBecomeLost() &&
9999 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10001 ++*itemsToMakeLostCount;
10014 ++nextSuballocItem;
// ---- Path 2: no eviction — must fit inside one free suballocation --
10020 const VmaSuballocation& suballoc = *suballocItem;
10021 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10023 *pSumFreeSize = suballoc.size;
10026 if(suballoc.size < allocSize)
10032 *pOffset = suballoc.offset;
10035 if(VMA_DEBUG_MARGIN > 0)
10037 *pOffset += VMA_DEBUG_MARGIN;
10041 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
10045 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
10047 bool bufferImageGranularityConflict =
false;
10048 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
10049 while(prevSuballocItem != m_Suballocations.cbegin())
10051 --prevSuballocItem;
10052 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
10053 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
10055 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10057 bufferImageGranularityConflict =
true;
10065 if(bufferImageGranularityConflict)
10067 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
10072 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
10075 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fail if padded request overflows this free range (return elided).
10078 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// A granularity conflict with a following suballocation is fatal here —
// nothing can be evicted on this path.
10085 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
10087 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
10088 ++nextSuballocItem;
10089 while(nextSuballocItem != m_Suballocations.cend())
10091 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
10092 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10094 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10104 ++nextSuballocItem;
// Merges a free suballocation with its (also free) successor: the item
// absorbs the successor's size and the successor is erased.
// NOTE(review): the `++nextItem;` advancing past `item` (original line ~10119)
// is elided in this extract.
10113 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
10115 VMA_ASSERT(item != m_Suballocations.end());
10116 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10118 VmaSuballocationList::iterator nextItem = item;
10120 VMA_ASSERT(nextItem != m_Suballocations.end());
10121 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
10123 item->size += nextItem->size;
10125 m_Suballocations.erase(nextItem);
// Turns an occupied suballocation back into FREE, updates aggregate
// counters (free-count increment elided in this extract), merges with
// free neighbors on either side, and re-registers the resulting range in
// the size-sorted free vector. Returns an iterator to the surviving item.
10128 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
10131 VmaSuballocation& suballoc = *suballocItem;
10132 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10133 suballoc.hAllocation = VK_NULL_HANDLE;
10137 m_SumFreeSize += suballoc.size;
10140 bool mergeWithNext =
false;
10141 bool mergeWithPrev =
false;
// Probe the successor (increment of nextItem elided in this extract).
10143 VmaSuballocationList::iterator nextItem = suballocItem;
10145 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
10147 mergeWithNext =
true;
// Probe the predecessor (decrement of prevItem elided in this extract).
10150 VmaSuballocationList::iterator prevItem = suballocItem;
10151 if(suballocItem != m_Suballocations.begin())
10154 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
10156 mergeWithPrev =
true;
// Neighbors leave the free vector before their sizes change.
10162 UnregisterFreeSuballocation(nextItem);
10163 MergeFreeWithNext(suballocItem);
10168 UnregisterFreeSuballocation(prevItem);
10169 MergeFreeWithNext(prevItem);
10170 RegisterFreeSuballocation(prevItem);
10175 RegisterFreeSuballocation(suballocItem);
10176 return suballocItem;
// Inserts a free range into the size-sorted vector, keeping it ordered.
// Ranges below VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are not tracked
// there (they still exist in the offset-ordered list).
10180 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
10182 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10183 VMA_ASSERT(item->size > 0);
10187 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
10189 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
10191 if(m_FreeSuballocationsBySize.empty())
10193 m_FreeSuballocationsBySize.push_back(item);
10197 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free range from the size-sorted vector: binary-search to the
// first entry of equal size, then scan forward through the equal-size run
// for the exact iterator. Asserts if the item is not found.
10205 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
10207 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10208 VMA_ASSERT(item->size > 0);
10212 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
10214 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
10216 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
10217 m_FreeSuballocationsBySize.data(),
10218 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
10220 VmaSuballocationItemSizeLess());
10221 for(
size_t index = it - m_FreeSuballocationsBySize.data();
10222 index < m_FreeSuballocationsBySize.size();
10225 if(m_FreeSuballocationsBySize[index] == item)
10227 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Past the equal-size run without a match — the item is missing.
10230 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
10232 VMA_ASSERT(0 &&
"Not found.");
// Conservatively determines whether moving allocations within this block could
// create a bufferImageGranularity conflict. Updates inOutPrevSuballocType to
// the type of the last used suballocation encountered, so consecutive blocks
// can be checked in sequence.
// NOTE(review): the early-out body for the first `if` (presumably `return
// false;`) appears dropped by this extraction.
10238 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
10239 VkDeviceSize bufferImageGranularity,
10240 VmaSuballocationType& inOutPrevSuballocType)
const
// Granularity 1 never conflicts; an empty block has nothing to conflict with.
10242 if(bufferImageGranularity == 1 || IsEmpty())
10247 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
10248 bool typeConflictFound =
false;
// Walk all used suballocations, tracking the smallest alignment seen and
// whether any adjacent pair of types conflicts per granularity rules.
10249 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
10250 it != m_Suballocations.cend();
10253 const VmaSuballocationType suballocType = it->type;
10254 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
10256 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
10257 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
10259 typeConflictFound =
true;
10261 inOutPrevSuballocType = suballocType;
// A conflict is possible only if conflicting types are adjacent AND some
// allocation's alignment is smaller than the granularity.
10265 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Constructs metadata for the linear allocation algorithm (stack /
// double-stack / ring buffer). Both suballocation vectors allocate through the
// allocator's allocation callbacks; the second vector starts empty.
// NOTE(review): the upstream initializer list also zero-initializes
// m_SumFreeSize here; that line appears missing from this extraction.
10271 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
10272 VmaBlockMetadata(hAllocator),
10274 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10275 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10276 m_1stVectorIndex(0),
10277 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
10278 m_1stNullItemsBeginCount(0),
10279 m_1stNullItemsMiddleCount(0),
10280 m_2ndNullItemsCount(0)
// Trivial destructor: the suballocation vectors release their storage through
// VmaStlAllocator automatically.
10284 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
10288 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
10290 VmaBlockMetadata::Init(size);
10291 m_SumFreeSize = size;
// Self-consistency check of the linear metadata: verifies the null-item
// counters, per-suballocation invariants (offset monotonicity, handle/type
// agreement, allocation offset/size agreement), and that m_SumFreeSize matches
// block size minus the sum of used sizes. Returns false on the first violated
// VMA_VALIDATE condition.
// NOTE(review): brace lines and some statements (loop increments, ++offset
// adjustments, else branches) appear dropped by this extraction.
10294 bool VmaBlockMetadata_Linear::Validate()
const
10296 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10297 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Structural invariants relating the two vectors and the 2nd-vector mode.
10299 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
10300 VMA_VALIDATE(!suballocations1st.empty() ||
10301 suballocations2nd.empty() ||
10302 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
10304 if(!suballocations1st.empty())
// Null items may appear only at the beginning of the 1st vector, never at end.
10307 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
10309 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
10311 if(!suballocations2nd.empty())
// Null items may not appear at the end of the 2nd vector.
10314 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
10317 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
10318 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
10320 VkDeviceSize sumUsedSize = 0;
10321 const size_t suballoc1stCount = suballocations1st.size();
10322 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies the region before the 1st vector.
10324 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10326 const size_t suballoc2ndCount = suballocations2nd.size();
10327 size_t nullItem2ndCount = 0;
10328 for(
size_t i = 0; i < suballoc2ndCount; ++i)
10330 const VmaSuballocation& suballoc = suballocations2nd[i];
10331 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10333 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10334 VMA_VALIDATE(suballoc.offset >= offset);
10338 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10339 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10340 sumUsedSize += suballoc.size;
10344 ++nullItem2ndCount;
10347 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10350 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Items before m_1stNullItemsBeginCount must all be null/free.
10353 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
10355 const VmaSuballocation& suballoc = suballocations1st[i];
10356 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
10357 suballoc.hAllocation == VK_NULL_HANDLE);
10360 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Validate the used portion of the 1st vector.
10362 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
10364 const VmaSuballocation& suballoc = suballocations1st[i];
10365 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10367 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10368 VMA_VALIDATE(suballoc.offset >= offset);
10369 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
10373 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10374 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10375 sumUsedSize += suballoc.size;
10379 ++nullItem1stCount;
10382 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10384 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows downward from the end of the block, so
// it is validated in reverse index order.
10386 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10388 const size_t suballoc2ndCount = suballocations2nd.size();
10389 size_t nullItem2ndCount = 0;
10390 for(
size_t i = suballoc2ndCount; i--; )
10392 const VmaSuballocation& suballoc = suballocations2nd[i];
10393 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10395 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10396 VMA_VALIDATE(suballoc.offset >= offset);
10400 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10401 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10402 sumUsedSize += suballoc.size;
10406 ++nullItem2ndCount;
10409 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10412 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Global accounting: total offsets within bounds, free size is consistent.
10415 VMA_VALIDATE(offset <= GetSize());
10416 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
10421 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const
10423 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
10424 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous free range, computed per
// 2nd-vector mode:
//   EMPTY        — free space before the 1st vector and after its last item;
//   RING_BUFFER  — the gap between the end of the 2nd vector and the start of
//                  the 1st vector;
//   DOUBLE_STACK — the gap between the end of the 1st stack and the top of the
//                  2nd (downward-growing) stack.
// NOTE(review): the empty-block early return, the VMA_MAX combining call for
// the EMPTY case, brace lines, and the trailing `return 0;` appear dropped by
// this extraction.
10427 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const
10429 const VkDeviceSize size = GetSize();
10441 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10443 switch(m_2ndVectorMode)
10445 case SECOND_VECTOR_EMPTY:
10451 const size_t suballocations1stCount = suballocations1st.size();
10452 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
10453 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10454 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
// Larger of: space before the first used item, space after the last item.
10456 firstSuballoc.offset,
10457 size - (lastSuballoc.offset + lastSuballoc.size);
10461 case SECOND_VECTOR_RING_BUFFER:
10466 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10467 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
10468 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
10469 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
10473 case SECOND_VECTOR_DOUBLE_STACK:
10478 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10479 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
10480 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
10481 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with allocation statistics by sweeping the block from offset 0
// to its end, visiting: the 2nd vector (ring-buffer mode) before the 1st
// vector, then the 1st vector, then the 2nd vector again (double-stack mode,
// iterated top-down). Null items are skipped; gaps are accounted as unused
// ranges.
// NOTE(review): the VmaStatInfo field-update statements (counts, min/max
// sizes) and brace lines appear dropped by this extraction — only the sweep
// skeleton remains visible.
10491 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
10493 const VkDeviceSize size = GetSize();
10494 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10495 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10496 const size_t suballoc1stCount = suballocations1st.size();
10497 const size_t suballoc2ndCount = suballocations2nd.size();
10508 VkDeviceSize lastOffset = 0;
// Phase 1 (ring-buffer only): sweep the 2nd vector, which occupies the space
// before the first used item of the 1st vector.
10510 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10512 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10513 size_t nextAlloc2ndIndex = 0;
10514 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null items.
10517 while(nextAlloc2ndIndex < suballoc2ndCount &&
10518 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10520 ++nextAlloc2ndIndex;
10524 if(nextAlloc2ndIndex < suballoc2ndCount)
10526 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Account the gap before this allocation as an unused range.
10529 if(lastOffset < suballoc.offset)
10532 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10546 lastOffset = suballoc.offset + suballoc.size;
10547 ++nextAlloc2ndIndex;
// No more allocations: remaining space up to the 1st vector is unused.
10553 if(lastOffset < freeSpace2ndTo1stEnd)
10555 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10563 lastOffset = freeSpace2ndTo1stEnd;
// Phase 2: sweep the 1st vector up to the start of the 2nd stack (or block end).
10568 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10569 const VkDeviceSize freeSpace1stTo2ndEnd =
10570 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10571 while(lastOffset < freeSpace1stTo2ndEnd)
10574 while(nextAlloc1stIndex < suballoc1stCount &&
10575 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10577 ++nextAlloc1stIndex;
10581 if(nextAlloc1stIndex < suballoc1stCount)
10583 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10586 if(lastOffset < suballoc.offset)
10589 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10603 lastOffset = suballoc.offset + suballoc.size;
10604 ++nextAlloc1stIndex;
10610 if(lastOffset < freeSpace1stTo2ndEnd)
10612 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10620 lastOffset = freeSpace1stTo2ndEnd;
// Phase 3 (double-stack only): sweep the 2nd vector top-down to the block end.
10624 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10626 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10627 while(lastOffset < size)
10630 while(nextAlloc2ndIndex != SIZE_MAX &&
10631 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10633 --nextAlloc2ndIndex;
10637 if(nextAlloc2ndIndex != SIZE_MAX)
10639 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10642 if(lastOffset < suballoc.offset)
10645 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10659 lastOffset = suballoc.offset + suballoc.size;
10660 --nextAlloc2ndIndex;
10666 if(lastOffset < size)
10668 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's statistics into inoutStats (pool-level totals) using
// the same three-phase sweep as CalcAllocationStatInfo: 2nd vector in
// ring-buffer mode, 1st vector, then 2nd vector top-down in double-stack mode.
// NOTE(review): the statements that add used/unused sizes and counts into
// inoutStats appear dropped by this extraction; only the sweep skeleton and
// the size accumulation at the top remain.
10684 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const
10686 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10687 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10688 const VkDeviceSize size = GetSize();
10689 const size_t suballoc1stCount = suballocations1st.size();
10690 const size_t suballoc2ndCount = suballocations2nd.size();
// Whole block size always contributes to the pool total.
10692 inoutStats.
size += size;
10694 VkDeviceSize lastOffset = 0;
// Phase 1 (ring-buffer only): sweep 2nd vector before the 1st vector's start.
10696 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10698 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): upstream starts this index at 0, not m_1stNullItemsBeginCount;
// this line looks garbled — verify against the original.
10699 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
10700 while(lastOffset < freeSpace2ndTo1stEnd)
10703 while(nextAlloc2ndIndex < suballoc2ndCount &&
10704 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10706 ++nextAlloc2ndIndex;
10710 if(nextAlloc2ndIndex < suballoc2ndCount)
10712 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10715 if(lastOffset < suballoc.offset)
10718 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10729 lastOffset = suballoc.offset + suballoc.size;
10730 ++nextAlloc2ndIndex;
10735 if(lastOffset < freeSpace2ndTo1stEnd)
10738 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10745 lastOffset = freeSpace2ndTo1stEnd;
// Phase 2: sweep 1st vector up to the 2nd stack (or block end).
10750 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10751 const VkDeviceSize freeSpace1stTo2ndEnd =
10752 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10753 while(lastOffset < freeSpace1stTo2ndEnd)
10756 while(nextAlloc1stIndex < suballoc1stCount &&
10757 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10759 ++nextAlloc1stIndex;
10763 if(nextAlloc1stIndex < suballoc1stCount)
10765 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10768 if(lastOffset < suballoc.offset)
10771 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10782 lastOffset = suballoc.offset + suballoc.size;
10783 ++nextAlloc1stIndex;
10788 if(lastOffset < freeSpace1stTo2ndEnd)
10791 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10798 lastOffset = freeSpace1stTo2ndEnd;
// Phase 3 (double-stack only): sweep 2nd vector top-down to the block end.
10802 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10804 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10805 while(lastOffset < size)
10808 while(nextAlloc2ndIndex != SIZE_MAX &&
10809 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10811 --nextAlloc2ndIndex;
10815 if(nextAlloc2ndIndex != SIZE_MAX)
10817 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10820 if(lastOffset < suballoc.offset)
10823 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10834 lastOffset = suballoc.offset + suballoc.size;
10835 --nextAlloc2ndIndex;
10840 if(lastOffset < size)
10843 const VkDeviceSize unusedRangeSize = size - lastOffset;
#if VMA_STATS_STRING_ENABLED
// Writes a detailed JSON map of this block. Two passes over the same
// three-phase sweep used by the stat functions:
//   pass 1 — count allocations/unused ranges and sum used bytes, so
//            PrintDetailedMap_Begin can emit the summary header;
//   pass 2 — emit each allocation and unused range in offset order.
// NOTE(review): brace lines and several statements (++alloc counters, the
// VMA_ASSERT on suballoc type, the 1st-pass DOUBLE_STACK end handling) appear
// dropped by this extraction.
10857 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const
10859 const VkDeviceSize size = GetSize();
10860 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10861 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10862 const size_t suballoc1stCount = suballocations1st.size();
10863 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: gather counts and used-byte total.
10867 size_t unusedRangeCount = 0;
10868 VkDeviceSize usedBytes = 0;
10870 VkDeviceSize lastOffset = 0;
10872 size_t alloc2ndCount = 0;
10873 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10875 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10876 size_t nextAlloc2ndIndex = 0;
10877 while(lastOffset < freeSpace2ndTo1stEnd)
10880 while(nextAlloc2ndIndex < suballoc2ndCount &&
10881 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10883 ++nextAlloc2ndIndex;
10887 if(nextAlloc2ndIndex < suballoc2ndCount)
10889 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10892 if(lastOffset < suballoc.offset)
10895 ++unusedRangeCount;
10901 usedBytes += suballoc.size;
10904 lastOffset = suballoc.offset + suballoc.size;
10905 ++nextAlloc2ndIndex;
10910 if(lastOffset < freeSpace2ndTo1stEnd)
10913 ++unusedRangeCount;
10917 lastOffset = freeSpace2ndTo1stEnd;
10922 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10923 size_t alloc1stCount = 0;
10924 const VkDeviceSize freeSpace1stTo2ndEnd =
10925 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10926 while(lastOffset < freeSpace1stTo2ndEnd)
10929 while(nextAlloc1stIndex < suballoc1stCount &&
10930 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10932 ++nextAlloc1stIndex;
10936 if(nextAlloc1stIndex < suballoc1stCount)
10938 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10941 if(lastOffset < suballoc.offset)
10944 ++unusedRangeCount;
10950 usedBytes += suballoc.size;
10953 lastOffset = suballoc.offset + suballoc.size;
10954 ++nextAlloc1stIndex;
10959 if(lastOffset < size)
10962 ++unusedRangeCount;
10966 lastOffset = freeSpace1stTo2ndEnd;
10970 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10972 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10973 while(lastOffset < size)
10976 while(nextAlloc2ndIndex != SIZE_MAX &&
10977 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10979 --nextAlloc2ndIndex;
10983 if(nextAlloc2ndIndex != SIZE_MAX)
10985 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10988 if(lastOffset < suballoc.offset)
10991 ++unusedRangeCount;
10997 usedBytes += suballoc.size;
11000 lastOffset = suballoc.offset + suballoc.size;
11001 --nextAlloc2ndIndex;
11006 if(lastOffset < size)
11009 ++unusedRangeCount;
// Emit the summary header, then do the SECOND PASS to write each item.
11018 const VkDeviceSize unusedBytes = size - usedBytes;
11019 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
11024 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11026 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
11027 size_t nextAlloc2ndIndex = 0;
11028 while(lastOffset < freeSpace2ndTo1stEnd)
11031 while(nextAlloc2ndIndex < suballoc2ndCount &&
11032 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11034 ++nextAlloc2ndIndex;
11038 if(nextAlloc2ndIndex < suballoc2ndCount)
11040 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11043 if(lastOffset < suballoc.offset)
11046 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11047 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11052 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11055 lastOffset = suballoc.offset + suballoc.size;
11056 ++nextAlloc2ndIndex;
11061 if(lastOffset < freeSpace2ndTo1stEnd)
11064 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
11065 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11069 lastOffset = freeSpace2ndTo1stEnd;
11074 nextAlloc1stIndex = m_1stNullItemsBeginCount;
11075 while(lastOffset < freeSpace1stTo2ndEnd)
11078 while(nextAlloc1stIndex < suballoc1stCount &&
11079 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
11081 ++nextAlloc1stIndex;
11085 if(nextAlloc1stIndex < suballoc1stCount)
11087 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
11090 if(lastOffset < suballoc.offset)
11093 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11094 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11099 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11102 lastOffset = suballoc.offset + suballoc.size;
11103 ++nextAlloc1stIndex;
11108 if(lastOffset < freeSpace1stTo2ndEnd)
11111 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
11112 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11116 lastOffset = freeSpace1stTo2ndEnd;
11120 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11122 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
11123 while(lastOffset < size)
11126 while(nextAlloc2ndIndex != SIZE_MAX &&
11127 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11129 --nextAlloc2ndIndex;
11133 if(nextAlloc2ndIndex != SIZE_MAX)
11135 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11138 if(lastOffset < suballoc.offset)
11141 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11142 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11147 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11150 lastOffset = suballoc.offset + suballoc.size;
11151 --nextAlloc2ndIndex;
11156 if(lastOffset < size)
11159 const VkDeviceSize unusedRangeSize = size - lastOffset;
11160 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11169 PrintDetailedMap_End(json);
// Entry point for allocation requests on a linear block: validates arguments
// and dispatches to the upper-address (double-stack top) or lower-address
// (stack/ring-buffer) implementation. Returns true and fills
// *pAllocationRequest on success.
// NOTE(review): the `bool upperAddress` and `uint32_t strategy` parameters are
// referenced in the body but missing from the visible parameter list — they
// appear dropped by this extraction.
11173 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
11174 uint32_t currentFrameIndex,
11175 uint32_t frameInUseCount,
11176 VkDeviceSize bufferImageGranularity,
11177 VkDeviceSize allocSize,
11178 VkDeviceSize allocAlignment,
11180 VmaSuballocationType allocType,
11181 bool canMakeOtherLost,
11183 VmaAllocationRequest* pAllocationRequest)
11185 VMA_ASSERT(allocSize > 0);
11186 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
11187 VMA_ASSERT(pAllocationRequest != VMA_NULL);
11188 VMA_HEAVY_ASSERT(Validate());
// Dispatch on requested address direction; all parameters forwarded as-is.
11189 return upperAddress ?
11190 CreateAllocationRequest_UpperAddress(
11191 currentFrameIndex, frameInUseCount, bufferImageGranularity,
11192 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
11193 CreateAllocationRequest_LowerAddress(
11194 currentFrameIndex, frameInUseCount, bufferImageGranularity,
11195 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the top of the block (double-stack upper
// side): the candidate offset starts at block end (or below the current top of
// the 2nd vector) minus allocSize, then is adjusted down for debug margin,
// alignment, and bufferImageGranularity conflicts with 2nd-vector neighbors.
// Succeeds only if the result does not collide with the end of the 1st vector.
// NOTE(review): several `return false;`/`return true;` statements, brace
// lines, and the `uint32_t strategy` parameter appear dropped by this
// extraction.
11198 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
11199 uint32_t currentFrameIndex,
11200 uint32_t frameInUseCount,
11201 VkDeviceSize bufferImageGranularity,
11202 VkDeviceSize allocSize,
11203 VkDeviceSize allocAlignment,
11204 VmaSuballocationType allocType,
11205 bool canMakeOtherLost,
11207 VmaAllocationRequest* pAllocationRequest)
11209 const VkDeviceSize size = GetSize();
11210 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11211 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address allocation is incompatible with ring-buffer usage.
11213 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11215 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
// Reject allocations larger than the whole block.
11220 if(allocSize > size)
11224 VkDeviceSize resultBaseOffset = size - allocSize;
11225 if(!suballocations2nd.empty())
11227 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11228 resultBaseOffset = lastSuballoc.offset - allocSize;
// Not enough space below the current top of the 2nd stack.
11229 if(allocSize > lastSuballoc.offset)
// Start from the base offset and move down: debug margin first, then
// alignment, then granularity.
11236 VkDeviceSize resultOffset = resultBaseOffset;
11239 if(VMA_DEBUG_MARGIN > 0)
11241 if(resultOffset < VMA_DEBUG_MARGIN)
11245 resultOffset -= VMA_DEBUG_MARGIN;
11249 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Check granularity conflicts against 2nd-vector allocations above this one.
11253 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
11255 bool bufferImageGranularityConflict =
false;
11256 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11258 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11259 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11261 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
11263 bufferImageGranularityConflict =
true;
11271 if(bufferImageGranularityConflict)
11273 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// There must be room between the end of the 1st vector and resultOffset.
11278 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
11279 suballocations1st.back().offset + suballocations1st.back().size :
11281 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity conflicts against 1st-vector allocations below.
11285 if(bufferImageGranularity > 1)
11287 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11289 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11290 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11292 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request. Nothing needs to be made lost for upper address.
11306 pAllocationRequest->offset = resultOffset;
11307 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
11308 pAllocationRequest->sumItemSize = 0;
11310 pAllocationRequest->itemsToMakeLostCount = 0;
11311 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to place an allocation at the lower address side. Two strategies:
//  1) At the end of the 1st vector (stack top) when the 2nd vector is empty or
//     used as a double stack — offset adjusted up for margin, alignment and
//     granularity, then bounds-checked against the 2nd stack / block end.
//  2) At the end of the 2nd vector when operating (or starting to operate) as
//     a ring buffer wrapping around — optionally making lost allocations from
//     the 1st vector to create room when canMakeOtherLost is set.
// NOTE(review): many statements (`return true;`/`return false;`, loop
// advances, else branches) and the `uint32_t strategy` parameter appear
// dropped by this extraction; do not treat the visible text as complete.
11318 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
11319 uint32_t currentFrameIndex,
11320 uint32_t frameInUseCount,
11321 VkDeviceSize bufferImageGranularity,
11322 VkDeviceSize allocSize,
11323 VkDeviceSize allocAlignment,
11324 VmaSuballocationType allocType,
11325 bool canMakeOtherLost,
11327 VmaAllocationRequest* pAllocationRequest)
11329 const VkDeviceSize size = GetSize();
11330 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11331 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Strategy 1: allocate at the end of the 1st vector.
11333 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11337 VkDeviceSize resultBaseOffset = 0;
11338 if(!suballocations1st.empty())
11340 const VmaSuballocation& lastSuballoc = suballocations1st.back();
11341 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
// Adjust candidate offset upward: debug margin, alignment, granularity.
11345 VkDeviceSize resultOffset = resultBaseOffset;
11348 if(VMA_DEBUG_MARGIN > 0)
11350 resultOffset += VMA_DEBUG_MARGIN;
11354 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
11358 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty())
11360 bool bufferImageGranularityConflict =
false;
11361 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11363 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11364 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11366 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11368 bufferImageGranularityConflict =
true;
11376 if(bufferImageGranularityConflict)
11378 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the 2nd stack (double stack) or block end.
11382 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
11383 suballocations2nd.back().offset : size;
11386 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Check granularity conflicts with allocations in the 2nd stack above.
11390 if((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11392 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11394 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11395 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11397 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success at end of 1st vector.
11411 pAllocationRequest->offset = resultOffset;
11412 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
11413 pAllocationRequest->sumItemSize = 0;
11415 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
11416 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy 2: wrap around — allocate at the end of the 2nd vector, possibly
// making existing 1st-vector allocations lost to free up the space.
11423 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11425 VMA_ASSERT(!suballocations1st.empty());
11427 VkDeviceSize resultBaseOffset = 0;
11428 if(!suballocations2nd.empty())
11430 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11431 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11435 VkDeviceSize resultOffset = resultBaseOffset;
11438 if(VMA_DEBUG_MARGIN > 0)
11440 resultOffset += VMA_DEBUG_MARGIN;
11444 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
11448 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
11450 bool bufferImageGranularityConflict =
false;
11451 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
11453 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
11454 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11456 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11458 bufferImageGranularityConflict =
true;
11466 if(bufferImageGranularityConflict)
11468 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
11472 pAllocationRequest->itemsToMakeLostCount = 0;
11473 pAllocationRequest->sumItemSize = 0;
11474 size_t index1st = m_1stNullItemsBeginCount;
// Count 1st-vector allocations overlapping the requested range that can be
// made lost (enough frames have passed since their last use).
11476 if(canMakeOtherLost)
11478 while(index1st < suballocations1st.size() &&
11479 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
11482 const VmaSuballocation& suballoc = suballocations1st[index1st];
11483 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
11489 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11490 if(suballoc.hAllocation->CanBecomeLost() &&
11491 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11493 ++pAllocationRequest->itemsToMakeLostCount;
11494 pAllocationRequest->sumItemSize += suballoc.size;
// Also make lost any following allocations on the same granularity page.
11506 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
11508 while(index1st < suballocations1st.size())
11510 const VmaSuballocation& suballoc = suballocations1st[index1st];
11511 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
11513 if(suballoc.hAllocation != VK_NULL_HANDLE)
11516 if(suballoc.hAllocation->CanBecomeLost() &&
11517 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11519 ++pAllocationRequest->itemsToMakeLostCount;
11520 pAllocationRequest->sumItemSize += suballoc.size;
// Special unsupported case: 2nd vector would catch up with block end.
11538 if(index1st == suballocations1st.size() &&
11539 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
11542 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// The request fits either in the tail of the block or before the next
// surviving 1st-vector allocation.
11547 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
11548 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
// Final granularity check against remaining 1st-vector allocations.
11552 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
11554 for(
size_t nextSuballocIndex = index1st;
11555 nextSuballocIndex < suballocations1st.size();
11556 nextSuballocIndex++)
11558 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
11559 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11561 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success at end of 2nd vector.
11575 pAllocationRequest->offset = resultOffset;
11576 pAllocationRequest->sumFreeSize =
11577 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
11579 - pAllocationRequest->sumItemSize;
11580 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Makes lost the allocations counted in pAllocationRequest->itemsToMakeLostCount,
// walking the 1st vector from its first non-null item and (in ring-buffer
// mode) continuing into the 2nd vector. Each lost item becomes a null/free
// item and updates m_SumFreeSize and the null counters; CleanupAfterFree then
// compacts the vectors. Returns success.
// NOTE(review): several statements (the index reset when switching vectors,
// `++index`, the MakeLost failure `return false;`, the final `return true;`)
// appear dropped by this extraction.
11589 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
11590 uint32_t currentFrameIndex,
11591 uint32_t frameInUseCount,
11592 VmaAllocationRequest* pAllocationRequest)
// Nothing to do when the request required no items to be made lost.
11594 if(pAllocationRequest->itemsToMakeLostCount == 0)
11599 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
11602 SuballocationVectorType* suballocations = &AccessSuballocations1st();
11603 size_t index = m_1stNullItemsBeginCount;
11604 size_t madeLostCount = 0;
11605 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// Exhausted the current vector: in ring-buffer mode continue into the 2nd.
11607 if(index == suballocations->size())
11611 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11613 suballocations = &AccessSuballocations2nd();
11617 VMA_ASSERT(!suballocations->empty());
11619 VmaSuballocation& suballoc = (*suballocations)[index];
11620 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11622 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11623 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
11624 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the lost allocation into a null/free item and update accounting.
11626 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11627 suballoc.hAllocation = VK_NULL_HANDLE;
11628 m_SumFreeSize += suballoc.size;
11629 if(suballocations == &AccessSuballocations1st())
11631 ++m_1stNullItemsMiddleCount;
11635 ++m_2ndNullItemsCount;
// Compact the vectors now that items were nulled out.
11647 CleanupAfterFree();
// Scans both suballocation vectors and makes lost every allocation that can
// become lost and has not been used within the last frameInUseCount frames.
// Lost items become null/free items; accounting (null counters, m_SumFreeSize)
// is updated and CleanupAfterFree compacts the vectors if anything was lost.
// Returns the number of allocations made lost.
11653 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
11655 uint32_t lostAllocationCount = 0;
// Pass over the 1st vector, skipping the leading null items.
11657 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11658 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11660 VmaSuballocation& suballoc = suballocations1st[i];
11661 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11662 suballoc.hAllocation->CanBecomeLost() &&
11663 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11665 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11666 suballoc.hAllocation = VK_NULL_HANDLE;
11667 ++m_1stNullItemsMiddleCount;
11668 m_SumFreeSize += suballoc.size;
11669 ++lostAllocationCount;
// Pass over the whole 2nd vector.
11673 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11674 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11676 VmaSuballocation& suballoc = suballocations2nd[i];
11677 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11678 suballoc.hAllocation->CanBecomeLost() &&
11679 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11681 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11682 suballoc.hAllocation = VK_NULL_HANDLE;
11683 ++m_2ndNullItemsCount;
11684 m_SumFreeSize += suballoc.size;
11685 ++lostAllocationCount;
// Compact only when something actually changed.
11689 if(lostAllocationCount)
11691 CleanupAfterFree();
11694 return lostAllocationCount;
// Verifies the debug margin magic values immediately before and after every
// used suballocation in both vectors. pBlockData points at the mapped block
// memory. Returns VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted
// margin (asserting in debug builds), VK_SUCCESS otherwise.
// NOTE(review): the trailing `return VK_SUCCESS;` line appears dropped by this
// extraction.
11697 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
// Check all used allocations in the 1st vector (skipping leading null items).
11699 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11700 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11702 const VmaSuballocation& suballoc = suballocations1st[i];
11703 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11705 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11707 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11708 return VK_ERROR_VALIDATION_FAILED_EXT;
11710 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11712 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11713 return VK_ERROR_VALIDATION_FAILED_EXT;
// Check all used allocations in the 2nd vector.
11718 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11719 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11721 const VmaSuballocation& suballoc = suballocations2nd[i];
11722 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11724 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11726 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11727 return VK_ERROR_VALIDATION_FAILED_EXT;
11729 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11731 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11732 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: records the new
// suballocation in the appropriate vector depending on request.type and
// subtracts its size from m_SumFreeSize.
11740 void VmaBlockMetadata_Linear::Alloc(
11741 const VmaAllocationRequest& request,
11742 VmaSuballocationType type,
11743 VkDeviceSize allocSize,
11746 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
11748 switch(request.type)
// Upper address => 2nd vector used as the upper stack of a double stack.
11750 case VmaAllocationRequestType::UpperAddress:
11752 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
11753 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.")
11754 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11755 suballocations2nd.push_back(newSuballoc);
11756 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Append at the end of the 1st vector; offsets must stay increasing.
11759 case VmaAllocationRequestType::EndOf1st:
11761 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11763 VMA_ASSERT(suballocations1st.empty() ||
11764 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
11766 VMA_ASSERT(request.offset + allocSize <= GetSize());
11768 suballocations1st.push_back(newSuballoc);
// Append at the end of the 2nd vector; switches it into ring-buffer mode.
11771 case VmaAllocationRequestType::EndOf2nd:
11773 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// New allocation must not collide with the first used item of the 1st vector.
11775 VMA_ASSERT(!suballocations1st.empty() &&
11776 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
11777 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11779 switch(m_2ndVectorMode)
11781 case SECOND_VECTOR_EMPTY:
11783 VMA_ASSERT(suballocations2nd.empty());
11784 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
11786 case SECOND_VECTOR_RING_BUFFER:
11788 VMA_ASSERT(!suballocations2nd.empty());
11790 case SECOND_VECTOR_DOUBLE_STACK:
11791 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
11797 suballocations2nd.push_back(newSuballoc);
11801 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
11804 m_SumFreeSize -= newSuballoc.size;
11807 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
11809 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation that starts at the given block-relative
// offset. Tries cheap O(1) paths first (first used item of the 1st
// vector, last item of the 2nd or 1st vector), then falls back to
// binary search in the sorted vectors, marking the found item as a
// FREE null item. Asserts if the offset matches no allocation.
11812 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
11814 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11815 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11817 if(!suballocations1st.empty())
// Fast path: freeing the first (oldest) used item of the 1st vector.
11820 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
11821 if(firstSuballoc.offset == offset)
11823 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11824 firstSuballoc.hAllocation = VK_NULL_HANDLE;
11825 m_SumFreeSize += firstSuballoc.size;
11826 ++m_1stNullItemsBeginCount;
11827 CleanupAfterFree();
// Fast path: freeing the most recently pushed item of the 2nd vector.
11833 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
11834 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11836 VmaSuballocation& lastSuballoc = suballocations2nd.back();
11837 if(lastSuballoc.offset == offset)
11839 m_SumFreeSize += lastSuballoc.size;
11840 suballocations2nd.pop_back();
11841 CleanupAfterFree();
// Fast path: freeing the last item of the 1st vector (no 2nd vector in use).
11846 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
11848 VmaSuballocation& lastSuballoc = suballocations1st.back();
11849 if(lastSuballoc.offset == offset)
11851 m_SumFreeSize += lastSuballoc.size;
11852 suballocations1st.pop_back();
11853 CleanupAfterFree();
// Slow path: binary search in the 1st vector (sorted by increasing offset).
11860 VmaSuballocation refSuballoc;
11861 refSuballoc.offset = offset;
11863 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
11864 suballocations1st.begin() + m_1stNullItemsBeginCount,
11865 suballocations1st.end(),
11867 VmaSuballocationOffsetLess());
11868 if(it != suballocations1st.end())
11870 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11871 it->hAllocation = VK_NULL_HANDLE;
11872 ++m_1stNullItemsMiddleCount;
11873 m_SumFreeSize += it->size;
11874 CleanupAfterFree();
11879 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
// Slow path: binary search in the 2nd vector. Sort order depends on the
// mode: ring buffer is ascending, double stack is descending by offset.
11882 VmaSuballocation refSuballoc;
11883 refSuballoc.offset = offset;
11885 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
11886 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
11887 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
11888 if(it != suballocations2nd.end())
11890 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11891 it->hAllocation = VK_NULL_HANDLE;
11892 ++m_2ndNullItemsCount;
11893 m_SumFreeSize += it->size;
11894 CleanupAfterFree();
11899 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
11902 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const
11904 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11905 const size_t suballocCount = AccessSuballocations1st().size();
11906 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Re-establishes the invariants of the two suballocation vectors after
// one or more items were freed: resets everything when the block is
// completely empty, strips null items from the vector ends, optionally
// compacts the 1st vector, and promotes the 2nd vector to become the
// 1st when the 1st becomes empty in ring-buffer mode.
11909 void VmaBlockMetadata_Linear::CleanupAfterFree()
11911 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11912 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Whole block empty: reset to initial state.
11916 suballocations1st.clear();
11917 suballocations2nd.clear();
11918 m_1stNullItemsBeginCount = 0;
11919 m_1stNullItemsMiddleCount = 0;
11920 m_2ndNullItemsCount = 0;
11921 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11925 const size_t suballoc1stCount = suballocations1st.size();
11926 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11927 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Convert leading middle-null items into begin-null items.
11930 while(m_1stNullItemsBeginCount < suballoc1stCount &&
11931 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
11933 ++m_1stNullItemsBeginCount;
11934 --m_1stNullItemsMiddleCount;
// Drop null items from the end of the 1st vector.
11938 while(m_1stNullItemsMiddleCount > 0 &&
11939 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
11941 --m_1stNullItemsMiddleCount;
11942 suballocations1st.pop_back();
// Drop null items from the end of the 2nd vector.
11946 while(m_2ndNullItemsCount > 0 &&
11947 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
11949 --m_2ndNullItemsCount;
11950 suballocations2nd.pop_back();
// Drop null items from the beginning of the 2nd vector.
11954 while(m_2ndNullItemsCount > 0 &&
11955 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
11957 --m_2ndNullItemsCount;
11958 VmaVectorRemove(suballocations2nd, 0);
// Heuristic compaction: squeeze out all null items from the 1st vector.
11961 if(ShouldCompact1st())
11963 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
11964 size_t srcIndex = m_1stNullItemsBeginCount;
11965 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
11967 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
11971 if(dstIndex != srcIndex)
11973 suballocations1st[dstIndex] = suballocations1st[srcIndex];
11977 suballocations1st.resize(nonNullItemCount);
11978 m_1stNullItemsBeginCount = 0;
11979 m_1stNullItemsMiddleCount = 0;
// 2nd vector became empty: fall back to the single-vector mode.
11983 if(suballocations2nd.empty())
11985 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector became empty: swap roles so the 2nd vector becomes the 1st.
11989 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
11991 suballocations1st.clear();
11992 m_1stNullItemsBeginCount = 0;
11994 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11997 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11998 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
11999 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
12000 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
12002 ++m_1stNullItemsBeginCount;
12003 --m_1stNullItemsMiddleCount;
12005 m_2ndNullItemsCount = 0;
// Swap which underlying vector plays the "1st" role.
12006 m_1stVectorIndex ^= 1;
12011 VMA_HEAVY_ASSERT(Validate());
// Constructor: zeroes the per-level free-list table; other members are
// set in the (partially elided) initializer list. Real initialization
// of the buddy tree happens in Init().
12018 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
12019 VmaBlockMetadata(hAllocator),
12021 m_AllocationCount(0),
12025 memset(m_FreeList, 0,
sizeof(m_FreeList));
12028 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
12030 DeleteNode(m_Root);
// Initializes the buddy metadata for a block of the given size: the
// usable size is rounded down to the previous power of two, the number
// of levels is derived from MIN_NODE_SIZE/MAX_LEVELS, and a single
// free root node covering the whole usable range is created.
12033 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
12035 VmaBlockMetadata::Init(size);
// Buddy allocator requires a power-of-2 size; the remainder is unusable.
12037 m_UsableSize = VmaPrevPow2(size);
12038 m_SumFreeSize = m_UsableSize;
12042 while(m_LevelCount < MAX_LEVELS &&
12043 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Root node: free, covers the entire usable size, no parent/buddy.
12048 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
12049 rootNode->offset = 0;
12050 rootNode->type = Node::TYPE_FREE;
12051 rootNode->parent = VMA_NULL;
12052 rootNode->buddy = VMA_NULL;
12055 AddToFreeListFront(0, rootNode);
// Debug validation: recursively checks the buddy tree via ValidateNode,
// then verifies the aggregate counters and the structural integrity of
// every per-level free list (front/back pointers, prev/next links, and
// that unused levels are empty).
12058 bool VmaBlockMetadata_Buddy::Validate()
const
12061 ValidationContext ctx;
12062 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
12064 VMA_VALIDATE(
false &&
"ValidateNode failed.");
12066 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
12067 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Check each active level's free list.
12070 for(uint32_t level = 0; level < m_LevelCount; ++level)
12072 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
12073 m_FreeList[level].front->free.prev == VMA_NULL);
12075 for(Node* node = m_FreeList[level].front;
12077 node = node->free.next)
12079 VMA_VALIDATE(node->type == Node::TYPE_FREE);
12081 if(node->free.next == VMA_NULL)
12083 VMA_VALIDATE(m_FreeList[level].back == node);
12087 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must not hold any free nodes.
12093 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
12095 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Returns the size of the largest free range: the node size of the
// shallowest level that still has a node on its free list (levels are
// ordered from largest node size to smallest).
12101 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const
12103 for(uint32_t level = 0; level < m_LevelCount; ++level)
12105 if(m_FreeList[level].front != VMA_NULL)
12107 return LevelToNodeSize(level);
// Fills outInfo by recursively walking the buddy tree; the tail of the
// block beyond the power-of-2 usable size (GetUnusableSize) is counted
// separately. Several stat-update statements are elided in this view.
12113 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
12115 const VkDeviceSize unusableSize = GetUnusableSize();
12126 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
// Account for the unusable tail as an unused range (details elided).
12128 if(unusableSize > 0)
// Accumulates this block's totals into pool-level statistics. The
// unusable tail (size minus power-of-2 usable size) counts as unused.
12137 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const
12139 const VkDeviceSize unusableSize = GetUnusableSize();
12141 inoutStats.
size += GetSize();
12142 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
// Additional counters updated here are elided in this view.
12147 if(unusableSize > 0)
12154 #if VMA_STATS_STRING_ENABLED
// Emits a JSON description of this block: aggregate stats, then the
// tree contents via PrintDetailedMapNode, then the unusable tail (if
// any) as an unused range.
12156 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const
12160 CalcAllocationStatInfo(stat);
12162 PrintDetailedMap_Begin(
12168 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
12170 const VkDeviceSize unusableSize = GetUnusableSize();
12171 if(unusableSize > 0)
12173 PrintDetailedMap_UnusedRange(json,
12178 PrintDetailedMap_End(json);
// Searches the free lists for a node that can hold allocSize with the
// requested alignment. Buddy algorithm does not support upper-address
// allocations or lost allocations. Image/unknown allocation types are
// padded up to bufferImageGranularity to sidestep granularity
// conflicts. On success, fills *pAllocationRequest and stashes the
// chosen level in customData for Alloc() to reuse.
12183 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
12184 uint32_t currentFrameIndex,
12185 uint32_t frameInUseCount,
12186 VkDeviceSize bufferImageGranularity,
12187 VkDeviceSize allocSize,
12188 VkDeviceSize allocAlignment,
12190 VmaSuballocationType allocType,
12191 bool canMakeOtherLost,
12193 VmaAllocationRequest* pAllocationRequest)
12195 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Pad size/alignment for types that could conflict across granularity pages.
12199 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
12200 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
12201 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
12203 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
12204 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
12207 if(allocSize > m_UsableSize)
// Walk from the smallest level that fits (targetLevel) up toward the root.
12212 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
12213 for(uint32_t level = targetLevel + 1; level--; )
12215 for(Node* freeNode = m_FreeList[level].front;
12216 freeNode != VMA_NULL;
12217 freeNode = freeNode->free.next)
12219 if(freeNode->offset % allocAlignment == 0)
12221 pAllocationRequest->type = VmaAllocationRequestType::Normal;
12222 pAllocationRequest->offset = freeNode->offset;
12223 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
12224 pAllocationRequest->sumItemSize = 0;
12225 pAllocationRequest->itemsToMakeLostCount = 0;
// Remember the level so Alloc() can locate the node again.
12226 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// Buddy algorithm never plans to make other allocations lost, so this
// only succeeds when the request required none (count == 0).
12235 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
12236 uint32_t currentFrameIndex,
12237 uint32_t frameInUseCount,
12238 VmaAllocationRequest* pAllocationRequest)
12244 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost allocations are not supported by the buddy algorithm; the body
// is elided in this extraction (presumably it just returns 0 — TODO
// confirm against the full source).
12247 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation: finds the free node chosen by
// CreateAllocationRequest (level stored in request.customData, matched
// by offset), splits it repeatedly until it reaches the target level,
// then marks the final node as an allocation and updates counters.
12256 void VmaBlockMetadata_Buddy::Alloc(
12257 const VmaAllocationRequest& request,
12258 VmaSuballocationType type,
12259 VkDeviceSize allocSize,
12262 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
12264 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
12265 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Locate the node with the requested offset in that level's free list.
12267 Node* currNode = m_FreeList[currLevel].front;
12268 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
12269 while(currNode->offset != request.offset)
12271 currNode = currNode->free.next;
12272 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down until the node size matches the allocation's level.
12276 while(currLevel < targetLevel)
12280 RemoveFromFreeList(currLevel, currNode);
12282 const uint32_t childrenLevel = currLevel + 1;
// Create the two buddy children of currNode.
12285 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
12286 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
12288 leftChild->offset = currNode->offset;
12289 leftChild->type = Node::TYPE_FREE;
12290 leftChild->parent = currNode;
12291 leftChild->buddy = rightChild;
12293 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
12294 rightChild->type = Node::TYPE_FREE;
12295 rightChild->parent = currNode;
12296 rightChild->buddy = leftChild;
12299 currNode->type = Node::TYPE_SPLIT;
12300 currNode->split.leftChild = leftChild;
// Left child pushed last so it becomes the list head and is split next.
12303 AddToFreeListFront(childrenLevel, rightChild);
12304 AddToFreeListFront(childrenLevel, leftChild);
12309 currNode = m_FreeList[currLevel].front;
12318 VMA_ASSERT(currLevel == targetLevel &&
12319 currNode != VMA_NULL &&
12320 currNode->type == Node::TYPE_FREE);
12321 RemoveFromFreeList(currLevel, currNode);
12324 currNode->type = Node::TYPE_ALLOCATION;
12325 currNode->allocation.alloc = hAllocation;
12327 ++m_AllocationCount;
12329 m_SumFreeSize -= allocSize;
12332 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
12334 if(node->type == Node::TYPE_SPLIT)
12336 DeleteNode(node->split.leftChild->buddy);
12337 DeleteNode(node->split.leftChild);
12340 vma_delete(GetAllocationCallbacks(), node);
// Recursively validates one node of the buddy tree: parent/buddy links,
// then per-type checks — free nodes add to the free-size tally,
// allocation nodes add to the allocation count, split nodes validate
// both children (offsets must tile the parent exactly).
12343 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const
12345 VMA_VALIDATE(level < m_LevelCount);
12346 VMA_VALIDATE(curr->parent == parent);
// Only the root has no buddy; buddies must point at each other.
12347 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
12348 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
12351 case Node::TYPE_FREE:
12353 ctx.calculatedSumFreeSize += levelNodeSize;
12354 ++ctx.calculatedFreeCount;
12356 case Node::TYPE_ALLOCATION:
12357 ++ctx.calculatedAllocationCount;
// Internal fragmentation of this node counts as free space.
12358 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
12359 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
12361 case Node::TYPE_SPLIT:
12363 const uint32_t childrenLevel = level + 1;
12364 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
12365 const Node*
const leftChild = curr->split.leftChild;
12366 VMA_VALIDATE(leftChild != VMA_NULL);
12367 VMA_VALIDATE(leftChild->offset == curr->offset);
12368 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
12370 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
12372 const Node*
const rightChild = leftChild->buddy;
12373 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
12374 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
12376 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest level whose node size still
// fits it: descends while the next (half-size) level would still hold
// allocSize. (The loop's increment and final return are elided here.)
12387 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const
12390 uint32_t level = 0;
12391 VkDeviceSize currLevelNodeSize = m_UsableSize;
12392 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
12393 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
12396 currLevelNodeSize = nextLevelNodeSize;
12397 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at 'offset': walks the tree from the root,
// choosing the left/right child by offset until reaching the
// allocation node, marks it free, then repeatedly merges it with its
// buddy while the buddy is also free, finally re-inserting the merged
// node into the free list of its level.
12402 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
12405 Node* node = m_Root;
12406 VkDeviceSize nodeOffset = 0;
12407 uint32_t level = 0;
12408 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend to the allocation node containing 'offset'.
12409 while(node->type == Node::TYPE_SPLIT)
12411 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
12412 if(offset < nodeOffset + nextLevelSize)
12414 node = node->split.leftChild;
12418 node = node->split.leftChild->buddy;
12419 nodeOffset += nextLevelSize;
12422 levelNodeSize = nextLevelSize;
12425 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
12426 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
12429 --m_AllocationCount;
12430 m_SumFreeSize += alloc->GetSize();
12432 node->type = Node::TYPE_FREE;
// Merge with the buddy as long as it is also free; the merged parent
// becomes the new free node one level up (loop tail elided here).
12435 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
12437 RemoveFromFreeList(level, node->buddy);
12438 Node*
const parent = node->parent;
12440 vma_delete(GetAllocationCallbacks(), node->buddy);
12441 vma_delete(GetAllocationCallbacks(), node);
12442 parent->type = Node::TYPE_FREE;
12450 AddToFreeListFront(level, node);
// Recursive helper for CalcAllocationStatInfo: free nodes count as one
// unused range, allocation nodes count as one allocation (plus their
// internal fragmentation as an unused range), split nodes recurse into
// both children. Most stat-update statements are elided in this view.
12453 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const
12457 case Node::TYPE_FREE:
12463 case Node::TYPE_ALLOCATION:
12465 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Slack between the allocation and its power-of-2 node is unused space.
12471 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
12472 if(unusedRangeSize > 0)
12481 case Node::TYPE_SPLIT:
12483 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
12484 const Node*
const leftChild = node->split.leftChild;
12485 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
12486 const Node*
const rightChild = leftChild->buddy;
12487 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
12495 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
12497 VMA_ASSERT(node->type == Node::TYPE_FREE);
12500 Node*
const frontNode = m_FreeList[level].front;
12501 if(frontNode == VMA_NULL)
12503 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
12504 node->free.prev = node->free.next = VMA_NULL;
12505 m_FreeList[level].front = m_FreeList[level].back = node;
12509 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
12510 node->free.prev = VMA_NULL;
12511 node->free.next = frontNode;
12512 frontNode->free.prev = node;
12513 m_FreeList[level].front = node;
12517 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
12519 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
12522 if(node->free.prev == VMA_NULL)
12524 VMA_ASSERT(m_FreeList[level].front == node);
12525 m_FreeList[level].front = node->free.next;
12529 Node*
const prevFreeNode = node->free.prev;
12530 VMA_ASSERT(prevFreeNode->free.next == node);
12531 prevFreeNode->free.next = node->free.next;
12535 if(node->free.next == VMA_NULL)
12537 VMA_ASSERT(m_FreeList[level].back == node);
12538 m_FreeList[level].back = node->free.prev;
12542 Node*
const nextFreeNode = node->free.next;
12543 VMA_ASSERT(nextFreeNode->free.prev == node);
12544 nextFreeNode->free.prev = node->free.prev;
12548 #if VMA_STATS_STRING_ENABLED
// Recursive JSON emitter for one buddy-tree node: free nodes print as
// an unused range, allocation nodes print the allocation plus any
// internal-fragmentation slack, split nodes recurse into both children.
12549 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const
12553 case Node::TYPE_FREE:
12554 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
12556 case Node::TYPE_ALLOCATION:
12558 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
12559 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report the slack after the allocation within its node, if any.
12560 if(allocSize < levelNodeSize)
12562 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
12566 case Node::TYPE_SPLIT:
12568 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
12569 const Node*
const leftChild = node->split.leftChild;
12570 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
12571 const Node*
const rightChild = leftChild->buddy;
12572 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: leaves the block uninitialized (no metadata, no
// VkDeviceMemory, not mapped). Real setup happens in Init(). Parts of
// the initializer list are elided in this view.
12585 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
12586 m_pMetadata(VMA_NULL),
12587 m_MemoryTypeIndex(UINT32_MAX),
12589 m_hMemory(VK_NULL_HANDLE),
12591 m_pMappedData(VMA_NULL)
// Takes ownership of an already-allocated VkDeviceMemory and creates
// the metadata object matching the requested algorithm (linear, buddy,
// or the generic default — the switch structure is elided here).
12595 void VmaDeviceMemoryBlock::Init(
12598 uint32_t newMemoryTypeIndex,
12599 VkDeviceMemory newMemory,
12600 VkDeviceSize newSize,
12602 uint32_t algorithm)
// Init must not be called twice on the same block.
12604 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
12606 m_hParentPool = hParentPool;
12607 m_MemoryTypeIndex = newMemoryTypeIndex;
12609 m_hMemory = newMemory;
12614 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
12617 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
// Default: generic best-fit metadata.
12623 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
12625 m_pMetadata->Init(newSize);
// Releases the block's Vulkan memory and metadata. The block must be
// empty — destroying it with live allocations is a usage error.
12628 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
12632 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
12634 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
12635 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
12636 m_hMemory = VK_NULL_HANDLE;
12638 vma_delete(allocator, m_pMetadata);
12639 m_pMetadata = VMA_NULL;
// Sanity check: the block must own real memory of nonzero size; then
// delegates the detailed check to the metadata object.
12642 bool VmaDeviceMemoryBlock::Validate()
const
12644 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
12645 (m_pMetadata->GetSize() != 0));
12647 return m_pMetadata->Validate();
// Temporarily maps the whole block and asks the metadata to verify the
// debug magic margins around every allocation, then unmaps.
12650 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
12652 void* pData =
nullptr;
12653 VkResult res = Map(hAllocator, 1, &pData);
12654 if(res != VK_SUCCESS)
12659 res = m_pMetadata->CheckCorruption(pData);
12661 Unmap(hAllocator, 1);
// Reference-counted persistent mapping of the block. If already mapped,
// just bumps m_MapCount by 'count' and returns the cached pointer;
// otherwise performs the actual vkMapMemory (its argument list is
// elided here) under the block's mutex.
12666 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
12673 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12674 if(m_MapCount != 0)
// Already mapped: reuse the existing mapping.
12676 m_MapCount += count;
12677 VMA_ASSERT(m_pMappedData != VMA_NULL);
12678 if(ppData != VMA_NULL)
12680 *ppData = m_pMappedData;
12686 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
12687 hAllocator->m_hDevice,
12693 if(result == VK_SUCCESS)
12695 if(ppData != VMA_NULL)
12697 *ppData = m_pMappedData;
12699 m_MapCount = count;
// Decrements the mapping reference count by 'count' and performs the
// real vkUnmapMemory only when it drops to zero. Unbalanced unmaps
// trigger an assert instead of underflowing the counter.
12705 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
12712 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12713 if(m_MapCount >= count)
12715 m_MapCount -= count;
12716 if(m_MapCount == 0)
12718 m_pMappedData = VMA_NULL;
12719 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
12724 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Corruption-detection support: maps the block and writes the magic
// value into the debug margins before and after the allocation at
// [allocOffset, allocOffset + allocSize). Requires margins enabled.
12728 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12730 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12731 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12734 VkResult res = Map(hAllocator, 1, &pData);
12735 if(res != VK_SUCCESS)
12740 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
12741 VmaWriteMagicValue(pData, allocOffset + allocSize);
12743 Unmap(hAllocator, 1);
// Counterpart of WriteMagicValueAroundAllocation, called on free: maps
// the block and asserts that both margin magic values are intact,
// which would otherwise indicate an out-of-bounds write.
12748 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12750 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12751 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12754 VkResult res = Map(hAllocator, 1, &pData);
12755 if(res != VK_SUCCESS)
12760 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
12762 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
12764 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
12766 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
12769 Unmap(hAllocator, 1);
// Binds hBuffer to this block's memory at the allocation's offset plus
// allocationLocalOffset, under the block mutex so binding never races
// with a concurrent vkMapMemory on the same VkDeviceMemory.
12774 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
12777 VkDeviceSize allocationLocalOffset,
12781 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12782 hAllocation->GetBlock() ==
this);
12783 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12784 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12785 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12787 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12788 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
// Image counterpart of BindBufferMemory: binds hImage at the
// allocation's offset plus allocationLocalOffset, under the block
// mutex for the same map/bind synchronization reason.
12791 VkResult VmaDeviceMemoryBlock::BindImageMemory(
12794 VkDeviceSize allocationLocalOffset,
12798 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12799 hAllocation->GetBlock() ==
this);
12800 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12801 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12802 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12804 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12805 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
12810 memset(&outInfo, 0,
sizeof(outInfo));
12829 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the VmaPoolCreateInfo fields into the
// embedded VmaBlockVector; a zero blockSize means "use the allocator's
// preferred block size" and marks the size as non-explicit. Parts of
// the initializer list are elided in this view.
12837 VmaPool_T::VmaPool_T(
12840 VkDeviceSize preferredBlockSize) :
12844 createInfo.memoryTypeIndex,
12845 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
12846 createInfo.minBlockCount,
12847 createInfo.maxBlockCount,
12849 createInfo.frameInUseCount,
// explicitBlockSize flag: true when the caller fixed the block size.
12850 createInfo.blockSize != 0,
12852 createInfo.priority),
// Destructor: the pool must already be unlinked from the allocator's
// intrusive pool list (rest of the teardown is elided in this view).
12858 VmaPool_T::~VmaPool_T()
12860 VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL);
// Replaces the pool's debug name: frees the previous copy and stores a
// fresh heap copy of pName (the null-name else-branch is elided here).
12863 void VmaPool_T::SetName(
const char* pName)
12865 const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
12866 VmaFreeString(allocs, m_Name);
12868 if(pName != VMA_NULL)
12870 m_Name = VmaCreateStringCopy(allocs, pName);
12878 #if VMA_STATS_STRING_ENABLED
// Constructor: stores the allocation policy for this vector of
// VkDeviceMemory blocks of one memory type (block size bounds, count
// bounds, granularity, algorithm, priority) and starts with an empty
// block list. Some parameters/initializers are elided in this view.
12882 VmaBlockVector::VmaBlockVector(
12885 uint32_t memoryTypeIndex,
12886 VkDeviceSize preferredBlockSize,
12887 size_t minBlockCount,
12888 size_t maxBlockCount,
12889 VkDeviceSize bufferImageGranularity,
12890 uint32_t frameInUseCount,
12891 bool explicitBlockSize,
12892 uint32_t algorithm,
12894 m_hAllocator(hAllocator),
12895 m_hParentPool(hParentPool),
12896 m_MemoryTypeIndex(memoryTypeIndex),
12897 m_PreferredBlockSize(preferredBlockSize),
12898 m_MinBlockCount(minBlockCount),
12899 m_MaxBlockCount(maxBlockCount),
12900 m_BufferImageGranularity(bufferImageGranularity),
12901 m_FrameInUseCount(frameInUseCount),
12902 m_ExplicitBlockSize(explicitBlockSize),
12903 m_Algorithm(algorithm),
12904 m_Priority(priority),
12905 m_HasEmptyBlock(false),
12906 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
12911 VmaBlockVector::~VmaBlockVector()
12913 for(
size_t i = m_Blocks.size(); i--; )
12915 m_Blocks[i]->Destroy(m_hAllocator);
12916 vma_delete(m_hAllocator, m_Blocks[i]);
// Eagerly creates m_MinBlockCount blocks of the preferred size (used
// for pools with a minimum block count). The early-return on failure
// and the final success return are elided in this view.
12920 VkResult VmaBlockVector::CreateMinBlocks()
12922 for(
size_t i = 0; i < m_MinBlockCount; ++i)
12924 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
12925 if(res != VK_SUCCESS)
// Accumulates statistics of every block into *pStats under a shared
// (read) lock. The initial zeroing of *pStats is elided in this view.
12933 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
12935 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12937 const size_t blockCount = m_Blocks.size();
12946 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12948 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12949 VMA_ASSERT(pBlock);
12950 VMA_HEAVY_ASSERT(pBlock->Validate());
12951 pBlock->m_pMetadata->AddPoolStats(*pStats);
// True when the vector currently owns no memory blocks; takes the
// shared lock so the check is consistent with concurrent mutation.
12955 bool VmaBlockVector::IsEmpty()
12957 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12958 return m_Blocks.empty();
12961 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const
12963 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12964 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
12965 (VMA_DEBUG_MARGIN > 0) &&
12967 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retry attempts per allocation — presumably bounds the
// lost-allocation retry loop in VmaBlockVector; confirm at use sites.
12970 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates 'allocationCount' allocations by calling AllocatePage for
// each under the write lock. On corruption detection, size/alignment
// are padded to a multiple of the magic value size so margins stay
// aligned. On any failure, every allocation made so far is rolled back
// (freed, budget-decremented — details elided) and the output array is
// zeroed.
12972 VkResult VmaBlockVector::Allocate(
12973 uint32_t currentFrameIndex,
12975 VkDeviceSize alignment,
12977 VmaSuballocationType suballocType,
12978 size_t allocationCount,
12982 VkResult res = VK_SUCCESS;
12984 if(IsCorruptionDetectionEnabled())
// Keep offsets/sizes aligned to the magic-value granularity.
12986 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
12987 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
12991 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12992 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12994 res = AllocatePage(
13000 pAllocations + allocIndex);
13001 if(res != VK_SUCCESS)
// Failure: undo the allocations already made, newest first.
13008 if(res != VK_SUCCESS)
13011 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13012 while(allocIndex--)
13014 VmaAllocation_T*
const alloc = pAllocations[allocIndex];
13015 const VkDeviceSize allocSize = alloc->GetSize();
13017 m_hAllocator->m_Budget.RemoveAllocation(heapIndex, allocSize);
// Leave no dangling handles in the caller's array.
13019 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
13025 VkResult VmaBlockVector::AllocatePage(
13026 uint32_t currentFrameIndex,
13028 VkDeviceSize alignment,
13030 VmaSuballocationType suballocType,
13038 VkDeviceSize freeMemory;
13040 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13042 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
13046 const bool canFallbackToDedicated = !IsCustomPool();
13047 const bool canCreateNewBlock =
13049 (m_Blocks.size() < m_MaxBlockCount) &&
13050 (freeMemory >= size || !canFallbackToDedicated);
13057 canMakeOtherLost =
false;
13061 if(isUpperAddress &&
13064 return VK_ERROR_FEATURE_NOT_PRESENT;
13078 return VK_ERROR_FEATURE_NOT_PRESENT;
13082 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
13084 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13092 if(!canMakeOtherLost || canCreateNewBlock)
13101 if(!m_Blocks.empty())
13103 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
13104 VMA_ASSERT(pCurrBlock);
13105 VkResult res = AllocateFromBlock(
13115 if(res == VK_SUCCESS)
13117 VMA_DEBUG_LOG(
" Returned from last block #%u", pCurrBlock->GetId());
13127 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13129 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13130 VMA_ASSERT(pCurrBlock);
13131 VkResult res = AllocateFromBlock(
13141 if(res == VK_SUCCESS)
13143 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
13151 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13153 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13154 VMA_ASSERT(pCurrBlock);
13155 VkResult res = AllocateFromBlock(
13165 if(res == VK_SUCCESS)
13167 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
13175 if(canCreateNewBlock)
13178 VkDeviceSize newBlockSize = m_PreferredBlockSize;
13179 uint32_t newBlockSizeShift = 0;
13180 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
13182 if(!m_ExplicitBlockSize)
13185 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
13186 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
13188 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
13189 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
13191 newBlockSize = smallerNewBlockSize;
13192 ++newBlockSizeShift;
13201 size_t newBlockIndex = 0;
13202 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
13203 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
13205 if(!m_ExplicitBlockSize)
13207 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
13209 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
13210 if(smallerNewBlockSize >= size)
13212 newBlockSize = smallerNewBlockSize;
13213 ++newBlockSizeShift;
13214 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
13215 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
13224 if(res == VK_SUCCESS)
13226 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
13227 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
13229 res = AllocateFromBlock(
13239 if(res == VK_SUCCESS)
13241 VMA_DEBUG_LOG(
" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
13247 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13254 if(canMakeOtherLost)
13256 uint32_t tryIndex = 0;
13257 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
13259 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
13260 VmaAllocationRequest bestRequest = {};
13261 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
13267 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13269 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13270 VMA_ASSERT(pCurrBlock);
13271 VmaAllocationRequest currRequest = {};
13272 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13275 m_BufferImageGranularity,
13284 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13285 if(pBestRequestBlock == VMA_NULL ||
13286 currRequestCost < bestRequestCost)
13288 pBestRequestBlock = pCurrBlock;
13289 bestRequest = currRequest;
13290 bestRequestCost = currRequestCost;
13292 if(bestRequestCost == 0)
13303 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13305 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13306 VMA_ASSERT(pCurrBlock);
13307 VmaAllocationRequest currRequest = {};
13308 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13311 m_BufferImageGranularity,
13320 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13321 if(pBestRequestBlock == VMA_NULL ||
13322 currRequestCost < bestRequestCost ||
13325 pBestRequestBlock = pCurrBlock;
13326 bestRequest = currRequest;
13327 bestRequestCost = currRequestCost;
13329 if(bestRequestCost == 0 ||
13339 if(pBestRequestBlock != VMA_NULL)
13343 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
13344 if(res != VK_SUCCESS)
13350 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
13356 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13357 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
13358 UpdateHasEmptyBlock();
13359 (*pAllocation)->InitBlockAllocation(
13361 bestRequest.offset,
13368 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
13369 VMA_DEBUG_LOG(
" Returned from existing block");
13370 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
13371 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13372 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13374 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
13376 if(IsCorruptionDetectionEnabled())
13378 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
13379 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
13394 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
13396 return VK_ERROR_TOO_MANY_OBJECTS;
13400 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13403 void VmaBlockVector::Free(
13406 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
13408 bool budgetExceeded =
false;
13410 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13412 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
13413 budgetExceeded = heapBudget.
usage >= heapBudget.
budget;
13418 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13420 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13422 if(IsCorruptionDetectionEnabled())
13424 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
13425 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
13428 if(hAllocation->IsPersistentMap())
13430 pBlock->Unmap(m_hAllocator, 1);
13433 pBlock->m_pMetadata->Free(hAllocation);
13434 VMA_HEAVY_ASSERT(pBlock->Validate());
13436 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
13438 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
13440 if(pBlock->m_pMetadata->IsEmpty())
13443 if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
13445 pBlockToDelete = pBlock;
13452 else if(m_HasEmptyBlock && canDeleteBlock)
13454 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
13455 if(pLastBlock->m_pMetadata->IsEmpty())
13457 pBlockToDelete = pLastBlock;
13458 m_Blocks.pop_back();
13462 UpdateHasEmptyBlock();
13463 IncrementallySortBlocks();
13468 if(pBlockToDelete != VMA_NULL)
13470 VMA_DEBUG_LOG(
" Deleted empty block");
13471 pBlockToDelete->Destroy(m_hAllocator);
13472 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block. Iterates backward and
// stops early once the preferred block size is reached (larger is irrelevant
// to callers like AllocatePage's new-block sizing heuristic).
// NOTE(review): extraction omits interior lines; code left byte-identical.
13476 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const
13478 VkDeviceSize result = 0;
13479 for(
size_t i = m_Blocks.size(); i--; )
13481 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
// Early exit: no block grows beyond m_PreferredBlockSize in practice.
13482 if(result >= m_PreferredBlockSize)
// Removes the given block from m_Blocks by linear search + VmaVectorRemove.
// Caller is responsible for locking and for destroying the block itself.
// NOTE(review): extraction omits interior lines; code left byte-identical.
13490 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
13492 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13494 if(m_Blocks[blockIndex] == pBlock)
13496 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass over m_Blocks by ascending sum of free size, so the
// most-full blocks drift toward the front. Called after each free; repeated
// calls converge to sorted order without a full sort per operation.
// NOTE(review): extraction omits interior lines; code left byte-identical.
13503 void VmaBlockVector::IncrementallySortBlocks()
13508 for(
size_t i = 1; i < m_Blocks.size(); ++i)
13510 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
13512 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts to carve an allocation out of one specific block: builds an
// allocation request against the block's metadata, and on success commits it
// (map if needed, create the VmaAllocation object, update budget, debug fill,
// corruption-detection margins). Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when
// the request cannot be satisfied by this block.
// NOTE(review): extraction omits interior lines; code left byte-identical.
13519 VkResult VmaBlockVector::AllocateFromBlock(
13520 VmaDeviceMemoryBlock* pBlock,
13521 uint32_t currentFrameIndex,
13523 VkDeviceSize alignment,
13526 VmaSuballocationType suballocType,
13535 VmaAllocationRequest currRequest = {};
13536 if(pBlock->m_pMetadata->CreateAllocationRequest(
13539 m_BufferImageGranularity,
// This path never evicts: the request must not require making others lost.
13549 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations keep the whole block mapped.
13553 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
13554 if(res != VK_SUCCESS)
13560 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13561 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
13562 UpdateHasEmptyBlock();
13563 (*pAllocation)->InitBlockAllocation(
13565 currRequest.offset,
13572 VMA_HEAVY_ASSERT(pBlock->Validate());
13573 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
13574 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
// Optional debug aids mirroring AllocatePage's lost-eviction path.
13575 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13577 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
13579 if(IsCorruptionDetectionEnabled())
13581 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
13582 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
13586 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of blockSize for this memory type, wraps it
// in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally returns
// its index via pNewBlockIndex.
// NOTE(review): extraction omits interior lines; code left byte-identical.
13589 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
13591 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
13592 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
13593 allocInfo.allocationSize = blockSize;
// Chain VkMemoryAllocateFlagsInfo when buffer-device-address is in use so
// memory allocated here can back BDA-capable buffers.
13595 #if VMA_BUFFER_DEVICE_ADDRESS
13597 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
13598 if(m_hAllocator->m_UseKhrBufferDeviceAddress)
13600 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
13601 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
// Chain VK_EXT_memory_priority info when the extension is enabled.
13605 #if VMA_MEMORY_PRIORITY
13606 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
13607 if(m_hAllocator->m_UseExtMemoryPriority)
13609 priorityInfo.priority = m_Priority;
13610 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
13614 VkDeviceMemory mem = VK_NULL_HANDLE;
13615 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Wrap the raw VkDeviceMemory in a block object (Init call partially elided).
13624 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
13630 allocInfo.allocationSize,
13634 m_Blocks.push_back(pBlock);
13635 if(pNewBlockIndex != VMA_NULL)
13637 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes a list of defragmentation moves on the CPU via memcpy between
// mapped blocks. Phases: (1) flag blocks touched by any move, (2) map those
// blocks (tracking which we mapped ourselves), (3) per move: invalidate the
// source range if memory is non-coherent, memcpy, rewrite corruption-
// detection margins, flush the destination range, (4) unmap what we mapped.
// NOTE(review): extraction omits interior lines; code left byte-identical.
13643 void VmaBlockVector::ApplyDefragmentationMovesCpu(
13644 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13645 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
13647 const size_t blockCount = m_Blocks.size();
13648 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
13652 BLOCK_FLAG_USED = 0x00000001,
13653 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
13661 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
13662 blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
13663 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Phase 1: mark every block that participates in at least one move.
13666 const size_t moveCount = moves.size();
13667 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13669 const VmaDefragmentationMove& move = moves[moveIndex];
13670 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
13671 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
13674 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Phase 2: ensure each used block is mapped; remember which ones we mapped
// here so only those are unmapped in phase 4.
13677 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13679 BlockInfo& currBlockInfo = blockInfo[blockIndex];
13680 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13681 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
13683 currBlockInfo.pMappedData = pBlock->GetMappedData();
13685 if(currBlockInfo.pMappedData == VMA_NULL)
13687 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
13688 if(pDefragCtx->res == VK_SUCCESS)
13690 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Phase 3: perform the copies.
13697 if(pDefragCtx->res == VK_SUCCESS)
13699 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13700 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13702 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13704 const VmaDefragmentationMove& move = moves[moveIndex];
13706 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
13707 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
13709 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Non-coherent memory: invalidate the source range (aligned down/up to
// nonCoherentAtomSize and clamped to the block size) before reading it.
13714 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
13715 memRange.memory = pSrcBlock->GetDeviceMemory();
13716 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
13717 memRange.size = VMA_MIN(
13718 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
13719 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
13720 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// The actual data move (memcpy/memmove call line partially elided here).
13725 reinterpret_cast<char*
>(dstBlockInfo.pMappedData) + move.dstOffset,
13726 reinterpret_cast<char*
>(srcBlockInfo.pMappedData) + move.srcOffset,
13727 static_cast<size_t>(move.size));
// Re-stamp magic margins around the new location for corruption detection.
13729 if(IsCorruptionDetectionEnabled())
13731 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
13732 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Non-coherent memory: flush the written destination range.
13738 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
13739 memRange.memory = pDstBlock->GetDeviceMemory();
13740 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
13741 memRange.size = VMA_MIN(
13742 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
13743 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
13744 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Phase 4: unmap only the blocks this function mapped (reverse order).
13751 for(
size_t blockIndex = blockCount; blockIndex--; )
13753 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
13754 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
13756 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13757 pBlock->Unmap(m_hAllocator, 1);
// Records GPU-side defragmentation: creates a temporary VkBuffer bound to
// each participating block's memory, then records vkCmdCopyBuffer commands
// for every move into the provided command buffer. Sets pDefragCtx->res to
// VK_NOT_READY when copies were recorded (caller must submit and wait).
// NOTE(review): extraction omits interior lines; code left byte-identical.
13762 void VmaBlockVector::ApplyDefragmentationMovesGpu(
13763 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13764 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13765 VkCommandBuffer commandBuffer)
13767 const size_t blockCount = m_Blocks.size();
13769 pDefragCtx->blockContexts.resize(blockCount);
13770 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Phase 1: flag blocks touched by at least one move.
13773 const size_t moveCount = moves.size();
13774 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13776 const VmaDefragmentationMove& move = moves[moveIndex];
13781 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13782 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13786 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Phase 2: create a whole-block transfer buffer per used block and bind it
// to that block's device memory at offset 0.
13790 VkBufferCreateInfo bufCreateInfo;
13791 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
13793 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13795 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
13796 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13797 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
13799 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
13800 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
13801 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
13802 if(pDefragCtx->res == VK_SUCCESS)
13804 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
13805 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Phase 3: record one buffer-to-buffer copy per move.
13812 if(pDefragCtx->res == VK_SUCCESS)
13814 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13816 const VmaDefragmentationMove& move = moves[moveIndex];
13818 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
13819 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
13821 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
13823 VkBufferCopy region = {
13827 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
13828 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
// VK_NOT_READY signals the caller that recorded work must be submitted and
// completed before defragmentation can be finalized.
13833 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
13835 pDefragCtx->res = VK_NOT_READY;
// NOTE(review): the enclosing function's signature line is elided by this
// extraction — from the body (stats accounting, block destruction, and the
// later calls `FreeEmptyBlocks(pStats)`) this is presumably
// VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats*) — TODO confirm.
// Destroys every empty block above m_MinBlockCount (reverse iteration so
// VmaVectorRemove indices stay valid) and credits freed bytes to the stats.
13841 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13843 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13844 if(pBlock->m_pMetadata->IsEmpty())
13846 if(m_Blocks.size() > m_MinBlockCount)
13848 if(pDefragmentationStats != VMA_NULL)
13851 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
13854 VmaVectorRemove(m_Blocks, blockIndex);
13855 pBlock->Destroy(m_hAllocator);
13856 vma_delete(m_hAllocator, pBlock);
13864 UpdateHasEmptyBlock();
// Recomputes the cached m_HasEmptyBlock flag by scanning all blocks.
// (The early-break after finding one empty block is elided in this view.)
// NOTE(review): extraction omits interior lines; code left byte-identical.
13867 void VmaBlockVector::UpdateHasEmptyBlock()
13869 m_HasEmptyBlock =
false;
13870 for(
size_t index = 0, count = m_Blocks.size(); index < count; ++index)
13872 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[index];
13873 if(pBlock->m_pMetadata->IsEmpty())
13875 m_HasEmptyBlock =
true;
13881 #if VMA_STATS_STRING_ENABLED
// Serializes this block vector as JSON for vmaBuildStatsString: pool/type
// metadata first (two layouts — custom pool vs. default pool; the branch
// selecting them is elided here), then a "Blocks" object keyed by block id
// with each block's detailed metadata map. Takes a read lock.
// NOTE(review): extraction omits interior lines; code left byte-identical.
13883 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
13885 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13887 json.BeginObject();
// Custom-pool branch: emit pool name and configuration.
13891 const char* poolName = m_hParentPool->GetName();
13892 if(poolName != VMA_NULL && poolName[0] !=
'\0')
13894 json.WriteString(
"Name");
13895 json.WriteString(poolName);
13898 json.WriteString(
"MemoryTypeIndex");
13899 json.WriteNumber(m_MemoryTypeIndex);
13901 json.WriteString(
"BlockSize");
13902 json.WriteNumber(m_PreferredBlockSize);
13904 json.WriteString(
"BlockCount");
13905 json.BeginObject(
true);
13906 if(m_MinBlockCount > 0)
13908 json.WriteString(
"Min");
13909 json.WriteNumber((uint64_t)m_MinBlockCount);
13911 if(m_MaxBlockCount < SIZE_MAX)
13913 json.WriteString(
"Max");
13914 json.WriteNumber((uint64_t)m_MaxBlockCount);
13916 json.WriteString(
"Cur");
13917 json.WriteNumber((uint64_t)m_Blocks.size());
13920 if(m_FrameInUseCount > 0)
13922 json.WriteString(
"FrameInUseCount");
13923 json.WriteNumber(m_FrameInUseCount);
13926 if(m_Algorithm != 0)
13928 json.WriteString(
"Algorithm");
13929 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default-pool branch: only the preferred block size is reported.
13934 json.WriteString(
"PreferredBlockSize");
13935 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps, keyed by block id.
13938 json.WriteString(
"Blocks");
13939 json.BeginObject();
13940 for(
size_t i = 0; i < m_Blocks.size(); ++i)
13942 json.BeginString();
13943 json.ContinueString(m_Blocks[i]->GetId());
13946 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs defragmentation for this block vector. Decides CPU vs. GPU path
// (GPU requires the memory-type bit in GetGpuDefragmentationMemoryTypeBits
// and no corruption detection; CPU requires host-visible memory — the
// host-visible condition line is elided here), computes moves under the
// write lock, updates the caller's remaining byte/allocation budgets, and
// applies the moves (CPU memcpy or GPU command-buffer recording).
// NOTE(review): extraction omits interior lines; code left byte-identical.
13955 void VmaBlockVector::Defragment(
13956 class VmaBlockVectorDefragmentationContext* pCtx,
13958 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
13959 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
13960 VkCommandBuffer commandBuffer)
13962 pCtx->res = VK_SUCCESS;
13964 const VkMemoryPropertyFlags memPropFlags =
13965 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
13966 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
13968 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
13970 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
13971 !IsCorruptionDetectionEnabled() &&
13972 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
13975 if(canDefragmentOnCpu || canDefragmentOnGpu)
13977 bool defragmentOnGpu;
// Only one path possible: take it. Both possible: prefer GPU for
// device-local memory or on integrated GPUs.
13979 if(canDefragmentOnGpu != canDefragmentOnCpu)
13981 defragmentOnGpu = canDefragmentOnGpu;
13986 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
13987 m_hAllocator->IsIntegratedGpu();
// GPU copies of overlapping ranges are unsafe; only the CPU path supports
// overlapping moves.
13990 bool overlappingMoveSupported = !defragmentOnGpu;
// Incremental defragmentation tries a non-blocking lock first; failure to
// acquire reports VK_ERROR_INITIALIZATION_FAILED rather than blocking.
13992 if(m_hAllocator->m_UseMutex)
13996 if(!m_Mutex.TryLockWrite())
13998 pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
14004 m_Mutex.LockWrite();
14005 pCtx->mutexLocked =
true;
14009 pCtx->Begin(overlappingMoveSupported, flags);
// Compute the moves within the budget of whichever path was chosen.
14013 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
14014 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
14015 pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);
// Deduct what was actually moved from the caller's remaining budgets.
14018 if(pStats != VMA_NULL)
14020 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
14021 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
14024 VMA_ASSERT(bytesMoved <= maxBytesToMove);
14025 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
14026 if(defragmentOnGpu)
14028 maxGpuBytesToMove -= bytesMoved;
14029 maxGpuAllocationsToMove -= allocationsMoved;
14033 maxCpuBytesToMove -= bytesMoved;
14034 maxCpuAllocationsToMove -= allocationsMoved;
// Incremental mode: unlock and report VK_NOT_READY while moves remain.
14040 if(m_hAllocator->m_UseMutex)
14041 m_Mutex.UnlockWrite();
14043 if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
14044 pCtx->res = VK_NOT_READY;
14049 if(pCtx->res >= VK_SUCCESS)
14051 if(defragmentOnGpu)
14053 ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
14057 ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
// Finalizes a defragmentation pass: re-acquires the lock if needed, destroys
// the temporary GPU-copy buffers created by ApplyDefragmentationMovesGpu,
// frees blocks emptied by the moves, and releases the mutex.
// NOTE(review): extraction omits interior lines; code left byte-identical.
14063 void VmaBlockVector::DefragmentationEnd(
14064 class VmaBlockVectorDefragmentationContext* pCtx,
// This branch (condition elided) expects the mutex NOT to be held yet and
// takes it here, mirroring the incremental path in Defragment().
14070 VMA_ASSERT(pCtx->mutexLocked ==
false);
14074 m_Mutex.LockWrite();
14075 pCtx->mutexLocked =
true;
14079 if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
// Destroy per-block staging buffers in reverse creation order.
14082 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
14084 VmaBlockDefragmentationContext &blockCtx = pCtx->blockContexts[blockIndex];
14085 if(blockCtx.hBuffer)
14087 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
14091 if(pCtx->res >= VK_SUCCESS)
14093 FreeEmptyBlocks(pStats);
14097 if(pCtx->mutexLocked)
14099 VMA_ASSERT(m_hAllocator->m_UseMutex);
14100 m_Mutex.UnlockWrite();
// Incremental defragmentation: hands out up to maxMoves pending moves to the
// caller as pass-move-info records (memory + offset of each destination),
// advances defragmentationMovesProcessed, and returns the number emitted
// (return statement elided in this view).
// NOTE(review): extraction omits interior lines; code left byte-identical.
14104 uint32_t VmaBlockVector::ProcessDefragmentations(
14105 class VmaBlockVectorDefragmentationContext *pCtx,
14108 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
// Clamp to the number of still-unprocessed moves.
14110 const uint32_t moveCount = VMA_MIN(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);
14112 for(uint32_t i = 0; i < moveCount; ++ i)
14114 VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];
14117 pMove->
memory = move.pDstBlock->GetDeviceMemory();
14118 pMove->
offset = move.dstOffset;
14123 pCtx->defragmentationMovesProcessed += moveCount;
// Commits moves the user has finished copying (processed but not yet
// committed): frees each allocation at its old offset and rebinds it to the
// destination block/offset, then releases any blocks that became empty.
// NOTE(review): extraction omits interior lines; code left byte-identical.
14128 void VmaBlockVector::CommitDefragmentations(
14129 class VmaBlockVectorDefragmentationContext *pCtx,
14132 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14134 for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
14136 const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];
14138 move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
14139 move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
14142 pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
14143 FreeEmptyBlocks(pStats);
// Sums the allocation counts of all blocks' metadata.
// (Declaration of `result` and the return statement are elided here.)
// NOTE(review): extraction omits interior lines; code left byte-identical.
14146 size_t VmaBlockVector::CalcAllocationCount()
const
14149 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14151 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Heuristic used by defragmentation: returns whether buffer/image granularity
// conflicts could occur in this vector. Trivially false when granularity is 1;
// otherwise asks each block's generic metadata, carrying the last suballocation
// type across block boundaries. Only valid for the default algorithm
// (VMA_ASSERT(m_Algorithm == 0)) since the cast assumes generic metadata.
// NOTE(review): extraction omits interior lines; code left byte-identical.
14156 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const
14158 if(m_BufferImageGranularity == 1)
14162 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
14163 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
14165 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
14166 VMA_ASSERT(m_Algorithm == 0);
14167 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
14168 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks as "lost" every allocation in this pool that is old enough
// (per currentFrameIndex and m_FrameInUseCount) and optionally reports how
// many were lost via pLostAllocationCount.
// NOTE(review): extraction omits interior lines; code left byte-identical.
14176 void VmaBlockVector::MakePoolAllocationsLost(
14177 uint32_t currentFrameIndex,
14178 size_t* pLostAllocationCount)
14180 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14181 size_t lostAllocationCount = 0;
14182 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14184 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14185 VMA_ASSERT(pBlock);
14186 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
14188 if(pLostAllocationCount != VMA_NULL)
14190 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection magic values in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not enabled;
// otherwise propagates the first non-success result from a block's check.
// NOTE(review): extraction omits interior lines; code left byte-identical.
14194 VkResult VmaBlockVector::CheckCorruption()
14196 if(!IsCorruptionDetectionEnabled())
14198 return VK_ERROR_FEATURE_NOT_PRESENT;
14201 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
14202 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14204 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14205 VMA_ASSERT(pBlock);
14206 VkResult res = pBlock->CheckCorruption(m_hAllocator);
14207 if(res != VK_SUCCESS)
// Accumulates this block vector's statistics into pStats: per block, computes
// an allocation-stat-info and folds it into the global total, this memory
// type's bucket, and this heap's bucket. Takes a read lock.
// NOTE(review): extraction omits interior lines; code left byte-identical.
14215 void VmaBlockVector::AddStats(
VmaStats* pStats)
14217 const uint32_t memTypeIndex = m_MemoryTypeIndex;
14218 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
14220 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
14222 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14224 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14225 VMA_ASSERT(pBlock);
14226 VMA_HEAVY_ASSERT(pBlock->Validate());
14228 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
14229 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14230 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14231 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Generic (allocation-moving) defragmentation algorithm constructor: builds
// one heap-allocated BlockInfo per block of the target vector (recording the
// original index) and sorts them by block pointer for binary search in
// AddAllocation.
// NOTE(review): extraction omits interior lines; code left byte-identical.
14238 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
14240 VmaBlockVector* pBlockVector,
14241 uint32_t currentFrameIndex,
14242 bool overlappingMoveSupported) :
14243 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14244 m_AllocationCount(0),
14245 m_AllAllocations(false),
14247 m_AllocationsMoved(0),
14248 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
14251 const size_t blockCount = m_pBlockVector->m_Blocks.size();
14252 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14254 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
14255 pBlockInfo->m_OriginalBlockIndex = blockIndex;
14256 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
14257 m_Blocks.push_back(pBlockInfo);
// Sorted by block pointer so AddAllocation can locate a block via
// VmaBinaryFindFirstNotLess.
14261 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Destructor: releases the per-block BlockInfo objects created in the
// constructor (reverse order).
14264 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
14266 for(
size_t i = m_Blocks.size(); i--; )
14268 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a defragmentation candidate: skips lost
// allocations, binary-searches its owning block's BlockInfo, and appends an
// AllocationInfo (with the caller's pChanged output flag) to that block.
// NOTE(review): extraction omits interior lines; code left byte-identical.
14272 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
14275 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
14277 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
14278 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
14279 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
14281 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
14282 (*it)->m_Allocations.push_back(allocInfo);
14289 ++m_AllocationCount;
// One round of the generic algorithm: walks allocations from the back
// (last block, last allocation) and tries to place each one into an
// earlier block (or earlier offset in the same block) whenever
// MoveMakesSense approves. Records each relocation as a
// VmaDefragmentationMove, optionally frees the old spot immediately
// (freeOldAllocations), and respects the maxBytesToMove /
// maxAllocationsToMove budgets.
// NOTE(review): extraction omits interior lines (including the outer loop
// header and several closing braces); code left byte-identical.
14293 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
14294 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14295 VkDeviceSize maxBytesToMove,
14296 uint32_t maxAllocationsToMove,
14297 bool freeOldAllocations)
14299 if(m_Blocks.empty())
// Cursor state: scan source allocations from the last block backward.
14312 size_t srcBlockMinIndex = 0;
14325 size_t srcBlockIndex = m_Blocks.size() - 1;
14326 size_t srcAllocIndex = SIZE_MAX;
// Advance the cursor to the previous non-empty block when the current one
// is exhausted (SIZE_MAX sentinel forces re-initialization below).
14332 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
14334 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
14337 if(srcBlockIndex == srcBlockMinIndex)
14344 srcAllocIndex = SIZE_MAX;
14349 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
14353 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
14354 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
14356 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
14357 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
14358 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
14359 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source
// block; MoveMakesSense rejects moves that don't reduce fragmentation.
14362 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
14364 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
14365 VmaAllocationRequest dstAllocRequest;
14366 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
14367 m_CurrentFrameIndex,
14368 m_pBlockVector->GetFrameInUseCount(),
14369 m_pBlockVector->GetBufferImageGranularity(),
14376 &dstAllocRequest) &&
14378 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
14380 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: stop the round when either limit would be exceeded.
14383 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
14384 (m_BytesMoved + size > maxBytesToMove))
// Record the move using ORIGINAL block indices (the local m_Blocks order
// is a sorted permutation of the block vector's order).
14389 VmaDefragmentationMove move = {};
14390 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
14391 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
14392 move.srcOffset = srcOffset;
14393 move.dstOffset = dstAllocRequest.offset;
14395 move.hAllocation = allocInfo.m_hAllocation;
14396 move.pSrcBlock = pSrcBlockInfo->m_pBlock;
14397 move.pDstBlock = pDstBlockInfo->m_pBlock;
14399 moves.push_back(move);
14401 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
14405 allocInfo.m_hAllocation);
// Non-incremental mode frees the source spot and rebinds immediately;
// incremental mode defers this to CommitDefragmentations.
14407 if(freeOldAllocations)
14409 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
14410 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
14413 if(allocInfo.m_pChanged != VMA_NULL)
14415 *allocInfo.m_pChanged = VK_TRUE;
14418 ++m_AllocationsMoved;
14419 m_BytesMoved += size;
14421 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Cursor retreat when no destination was found for this allocation.
14429 if(srcAllocIndex > 0)
14435 if(srcBlockIndex > 0)
14438 srcAllocIndex = SIZE_MAX;
// Counts blocks containing at least one non-movable allocation.
// (Accumulator declaration and return statement are elided in this view.)
14448 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const
14451 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14453 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm: prepares per-block allocation lists
// (either those registered via AddAllocation, or — when m_AllAllocations —
// every non-free suballocation harvested from each block's metadata), sorts
// allocations within blocks by descending offset and blocks by
// move-destination preference, then runs up to two DefragmentRound passes.
// NOTE(review): extraction omits interior lines; code left byte-identical.
14461 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
14462 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14463 VkDeviceSize maxBytesToMove,
14464 uint32_t maxAllocationsToMove,
// Nothing to do when no allocations were registered and we are not in
// whole-pool mode.
14467 if(!m_AllAllocations && m_AllocationCount == 0)
14472 const size_t blockCount = m_Blocks.size();
14473 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14475 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
// Whole-pool mode: harvest every live suballocation from the metadata.
14477 if(m_AllAllocations)
14479 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
14480 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
14481 it != pMetadata->m_Suballocations.end();
14484 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
14486 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
14487 pBlockInfo->m_Allocations.push_back(allocInfo);
14492 pBlockInfo->CalcHasNonMovableAllocations();
// Descending offset order lets DefragmentRound pop from the back cheaply.
14496 pBlockInfo->SortAllocationsByOffsetDescending();
14502 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Two rounds maximum; stop early if a round reports non-success.
14505 const uint32_t roundCount = 2;
14508 VkResult result = VK_SUCCESS;
14509 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
// Decides whether relocating an allocation improves packing: moving to an
// earlier block is always good, a later block never is, and within the same
// block only a move toward a lower offset counts (return statements for each
// branch are elided in this view).
14517 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
14518 size_t dstBlockIndex, VkDeviceSize dstOffset,
14519 size_t srcBlockIndex, VkDeviceSize srcOffset)
14521 if(dstBlockIndex < srcBlockIndex)
14525 if(dstBlockIndex > srcBlockIndex)
14529 if(dstOffset < srcOffset)
// Fast (compacting) defragmentation algorithm constructor: plain member
// initialization. Asserts VMA_DEBUG_MARGIN == 0 — this algorithm assumes no
// debug margins around allocations.
// NOTE(review): extraction omits interior lines; code left byte-identical.
14539 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
14541 VmaBlockVector* pBlockVector,
14542 uint32_t currentFrameIndex,
14543 bool overlappingMoveSupported) :
14544 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14545 m_OverlappingMoveSupported(overlappingMoveSupported),
14546 m_AllocationCount(0),
14547 m_AllAllocations(false),
14549 m_AllocationsMoved(0),
14550 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
14552 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Trivial destructor: m_BlockInfos cleans itself up.
14556 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Core of the "fast" defragmentation algorithm: compacts all suballocations
// toward the front of the block sequence in a single pass, limited by
// maxBytesToMove / maxAllocationsToMove. Emits one VmaDefragmentationMove per
// relocated allocation into `moves`.
// NOTE(review): this extract is garbled by line-number artifacts and missing
// lines from the original source; comments describe the visible logic only.
14560 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
14561 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14562 VkDeviceSize maxBytesToMove,
14563 uint32_t maxAllocationsToMove,
// Fast algorithm moves everything, so either AddAll() was used or every
// allocation was added explicitly.
14566 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
14568 const size_t blockCount = m_pBlockVector->GetBlockCount();
// Nothing to do when there are no blocks or the move budget is zero.
14569 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
// Strips FREE suballocations from all block metadata so only real
// allocations remain; PostprocessMetadata() rebuilds them afterwards.
14574 PreprocessMetadata();
// Sort blocks by ascending amount of free space, tracking original indices.
14578 m_BlockInfos.resize(blockCount);
14579 for(
size_t i = 0; i < blockCount; ++i)
14581 m_BlockInfos[i].origBlockIndex = i;
14584 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
14585 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
14586 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Database of free ranges skipped over earlier, so later small allocations
// can still be placed into them.
14591 FreeSpaceDatabase freeSpaceDb;
// Destination cursor: current destination block and write offset within it.
14593 size_t dstBlockInfoIndex = 0;
14594 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14595 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14596 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14597 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
14598 VkDeviceSize dstOffset = 0;
// Walk all source blocks and their suballocations in offset order.
14601 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
14603 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
14604 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
14605 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
14606 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
14607 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
14609 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
14610 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
14611 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Stop when either budget (count or bytes) would be exceeded.
14612 if(m_AllocationsMoved == maxAllocationsToMove ||
14613 m_BytesMoved + srcAllocSize > maxBytesToMove)
14618 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
14620 VmaDefragmentationMove move = {};
// First choice: reuse a previously-registered free range if one fits.
14622 size_t freeSpaceInfoIndex;
14623 VkDeviceSize dstAllocOffset;
14624 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
14625 freeSpaceInfoIndex, dstAllocOffset))
14627 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
14628 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
14629 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Case 1a: free range is in the same block - move within the block
// (offset only, memory block unchanged).
14632 if(freeSpaceInfoIndex == srcBlockInfoIndex)
14634 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14638 VmaSuballocation suballoc = *srcSuballocIt;
14639 suballoc.offset = dstAllocOffset;
14640 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
14641 m_BytesMoved += srcAllocSize;
14642 ++m_AllocationsMoved;
// Re-link the suballocation at its new position in the sorted list.
14644 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14646 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14647 srcSuballocIt = nextSuballocIt;
14649 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14651 move.srcBlockIndex = srcOrigBlockIndex;
14652 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14653 move.srcOffset = srcAllocOffset;
14654 move.dstOffset = dstAllocOffset;
14655 move.size = srcAllocSize;
14657 moves.push_back(move);
// Case 1b: free range is in an earlier block - move across blocks.
14664 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
14666 VmaSuballocation suballoc = *srcSuballocIt;
14667 suballoc.offset = dstAllocOffset;
14668 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
14669 m_BytesMoved += srcAllocSize;
14670 ++m_AllocationsMoved;
14672 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14674 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14675 srcSuballocIt = nextSuballocIt;
14677 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14679 move.srcBlockIndex = srcOrigBlockIndex;
14680 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14681 move.srcOffset = srcAllocOffset;
14682 move.dstOffset = dstAllocOffset;
14683 move.size = srcAllocSize;
14685 moves.push_back(move);
// Case 2: no stored free range fits - append at the destination cursor.
14690 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance destination cursor to the next block while the allocation
// doesn't fit; remember the skipped tail as reusable free space.
14693 while(dstBlockInfoIndex < srcBlockInfoIndex &&
14694 dstAllocOffset + srcAllocSize > dstBlockSize)
14697 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
14699 ++dstBlockInfoIndex;
14700 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14701 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14702 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14703 dstBlockSize = pDstMetadata->GetSize();
14705 dstAllocOffset = 0;
// Case 2a: destination caught up with the source block - move within it.
14709 if(dstBlockInfoIndex == srcBlockInfoIndex)
14711 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14713 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
14715 bool skipOver = overlap;
14716 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: skip overlapping move when gain (< size/64) is too small
// to justify the cost of an overlapping copy.
14720 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
// Skipped: leave the allocation in place, remember gap as free space.
14725 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
14727 dstOffset = srcAllocOffset + srcAllocSize;
// Performed: shift the allocation down within the same block.
14733 srcSuballocIt->offset = dstAllocOffset;
14734 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
14735 dstOffset = dstAllocOffset + srcAllocSize;
14736 m_BytesMoved += srcAllocSize;
14737 ++m_AllocationsMoved;
14740 move.srcBlockIndex = srcOrigBlockIndex;
14741 move.dstBlockIndex = dstOrigBlockIndex;
14742 move.srcOffset = srcAllocOffset;
14743 move.dstOffset = dstAllocOffset;
14744 move.size = srcAllocSize;
14746 moves.push_back(move);
// Case 2b: destination is an earlier block - move across blocks.
14754 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
14755 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
14757 VmaSuballocation suballoc = *srcSuballocIt;
14758 suballoc.offset = dstAllocOffset;
14759 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
14760 dstOffset = dstAllocOffset + srcAllocSize;
14761 m_BytesMoved += srcAllocSize;
14762 ++m_AllocationsMoved;
14764 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14766 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14767 srcSuballocIt = nextSuballocIt;
// Destination block is filled front-to-back, so push_back keeps order.
14769 pDstMetadata->m_Suballocations.push_back(suballoc);
14771 move.srcBlockIndex = srcOrigBlockIndex;
14772 move.dstBlockIndex = dstOrigBlockIndex;
14773 move.srcOffset = srcAllocOffset;
14774 move.dstOffset = dstAllocOffset;
14775 move.size = srcAllocSize;
14777 moves.push_back(move);
14783 m_BlockInfos.clear();
// Rebuild FREE suballocations, free lists and statistics in all blocks.
14785 PostprocessMetadata();
14790 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
14792 const size_t blockCount = m_pBlockVector->GetBlockCount();
14793 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14795 VmaBlockMetadata_Generic*
const pMetadata =
14796 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14797 pMetadata->m_FreeCount = 0;
14798 pMetadata->m_SumFreeSize = pMetadata->GetSize();
14799 pMetadata->m_FreeSuballocationsBySize.clear();
14800 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14801 it != pMetadata->m_Suballocations.end(); )
14803 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
14805 VmaSuballocationList::iterator nextIt = it;
14807 pMetadata->m_Suballocations.erase(it);
14818 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
14820 const size_t blockCount = m_pBlockVector->GetBlockCount();
14821 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14823 VmaBlockMetadata_Generic*
const pMetadata =
14824 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14825 const VkDeviceSize blockSize = pMetadata->GetSize();
14828 if(pMetadata->m_Suballocations.empty())
14830 pMetadata->m_FreeCount = 1;
14832 VmaSuballocation suballoc = {
14836 VMA_SUBALLOCATION_TYPE_FREE };
14837 pMetadata->m_Suballocations.push_back(suballoc);
14838 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
14843 VkDeviceSize offset = 0;
14844 VmaSuballocationList::iterator it;
14845 for(it = pMetadata->m_Suballocations.begin();
14846 it != pMetadata->m_Suballocations.end();
14849 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
14850 VMA_ASSERT(it->offset >= offset);
14853 if(it->offset > offset)
14855 ++pMetadata->m_FreeCount;
14856 const VkDeviceSize freeSize = it->offset - offset;
14857 VmaSuballocation suballoc = {
14861 VMA_SUBALLOCATION_TYPE_FREE };
14862 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
14863 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14865 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
14869 pMetadata->m_SumFreeSize -= it->size;
14870 offset = it->offset + it->size;
14874 if(offset < blockSize)
14876 ++pMetadata->m_FreeCount;
14877 const VkDeviceSize freeSize = blockSize - offset;
14878 VmaSuballocation suballoc = {
14882 VMA_SUBALLOCATION_TYPE_FREE };
14883 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
14884 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
14885 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14887 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
14892 pMetadata->m_FreeSuballocationsBySize.begin(),
14893 pMetadata->m_FreeSuballocationsBySize.end(),
14894 VmaSuballocationItemSizeLess());
14897 VMA_HEAVY_ASSERT(pMetadata->Validate());
14901 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
14904 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14905 while(it != pMetadata->m_Suballocations.end())
14907 if(it->offset < suballoc.offset)
14912 pMetadata->m_Suballocations.insert(it, suballoc);
14918 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
14921 VmaBlockVector* pBlockVector,
14922 uint32_t currFrameIndex) :
14924 mutexLocked(false),
14925 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
14926 defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
14927 defragmentationMovesProcessed(0),
14928 defragmentationMovesCommitted(0),
14929 hasDefragmentationPlan(0),
14930 m_hAllocator(hAllocator),
14931 m_hCustomPool(hCustomPool),
14932 m_pBlockVector(pBlockVector),
14933 m_CurrFrameIndex(currFrameIndex),
14934 m_pAlgorithm(VMA_NULL),
14935 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
14936 m_AllAllocations(false)
14940 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
14942 vma_delete(m_hAllocator, m_pAlgorithm);
14945 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
14947 AllocInfo info = { hAlloc, pChanged };
14948 m_Allocations.push_back(info);
14951 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported,
VmaDefragmentationFlags flags)
14953 const bool allAllocations = m_AllAllocations ||
14954 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
14967 if(VMA_DEBUG_MARGIN == 0 &&
14969 !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
14972 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
14973 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
14977 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
14978 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
14983 m_pAlgorithm->AddAll();
14987 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
14989 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
14997 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
14999 uint32_t currFrameIndex,
15002 m_hAllocator(hAllocator),
15003 m_CurrFrameIndex(currFrameIndex),
15006 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
15008 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
15011 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
15013 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15015 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
15016 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
15017 vma_delete(m_hAllocator, pBlockVectorCtx);
15019 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
15021 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
15022 if(pBlockVectorCtx)
15024 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
15025 vma_delete(m_hAllocator, pBlockVectorCtx);
15030 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
const VmaPool* pPools)
15032 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15034 VmaPool pool = pPools[poolIndex];
15037 if(pool->m_BlockVector.GetAlgorithm() == 0)
15039 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
15041 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15043 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
15045 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
15050 if(!pBlockVectorDefragCtx)
15052 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15055 &pool->m_BlockVector,
15057 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
15060 pBlockVectorDefragCtx->AddAll();
15065 void VmaDefragmentationContext_T::AddAllocations(
15066 uint32_t allocationCount,
15068 VkBool32* pAllocationsChanged)
15071 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
15074 VMA_ASSERT(hAlloc);
15076 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
15078 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
15080 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
15082 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
15084 if(hAllocPool != VK_NULL_HANDLE)
15087 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
15089 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15091 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
15093 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
15097 if(!pBlockVectorDefragCtx)
15099 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15102 &hAllocPool->m_BlockVector,
15104 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
15111 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
15112 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
15113 if(!pBlockVectorDefragCtx)
15115 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15118 m_hAllocator->m_pBlockVectors[memTypeIndex],
15120 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
15124 if(pBlockVectorDefragCtx)
15126 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
15127 &pAllocationsChanged[allocIndex] : VMA_NULL;
15128 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Runs defragmentation across all registered default-pool and custom-pool
// contexts, within the given CPU/GPU byte and allocation budgets.
// NOTE(review): this extract is garbled by line-number artifacts and missing
// lines (incremental-mode branching, stats handling); comments describe only
// what is visible.
15134 VkResult VmaDefragmentationContext_T::Defragment(
15135 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
15136 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// Budgets are saved on the context for later incremental passes.
15148 m_MaxCpuBytesToMove = maxCpuBytesToMove;
15149 m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
15151 m_MaxGpuBytesToMove = maxGpuBytesToMove;
15152 m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;
// Nothing can be moved with an all-zero budget.
15154 if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
15155 m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
15158 return VK_NOT_READY;
// Without a command buffer, GPU-side copies are impossible - zero the local
// GPU budget (the saved m_MaxGpu* values are intentionally left untouched).
15161 if(commandBuffer == VK_NULL_HANDLE)
15163 maxGpuBytesToMove = 0;
15164 maxGpuAllocationsToMove = 0;
15167 VkResult res = VK_SUCCESS;
// Process default pools; stop early when a context reports an error.
15170 for(uint32_t memTypeIndex = 0;
15171 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
15174 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15175 if(pBlockVectorCtx)
15177 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15178 pBlockVectorCtx->GetBlockVector()->Defragment(
15181 maxCpuBytesToMove, maxCpuAllocationsToMove,
15182 maxGpuBytesToMove, maxGpuAllocationsToMove,
15184 if(pBlockVectorCtx->res != VK_SUCCESS)
15186 res = pBlockVectorCtx->res;
// Process custom pools the same way.
15192 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15193 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
15196 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15197 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15198 pBlockVectorCtx->GetBlockVector()->Defragment(
15201 maxCpuBytesToMove, maxCpuAllocationsToMove,
15202 maxGpuBytesToMove, maxGpuAllocationsToMove,
15204 if(pBlockVectorCtx->res != VK_SUCCESS)
15206 res = pBlockVectorCtx->res;
// Incremental path: build a defragmentation plan per context on first use,
// then hand out as many pending moves as fit into the caller's array.
15219 for(uint32_t memTypeIndex = 0;
15220 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15223 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15224 if(pBlockVectorCtx)
15226 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15228 if(!pBlockVectorCtx->hasDefragmentationPlan)
15230 pBlockVectorCtx->GetBlockVector()->Defragment(
15233 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15234 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
15237 if(pBlockVectorCtx->res < VK_SUCCESS)
15240 pBlockVectorCtx->hasDefragmentationPlan =
true;
15243 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15245 pCurrentMove, movesLeft);
15247 movesLeft -= processed;
15248 pCurrentMove += processed;
// Same incremental handling for custom pools.
15253 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15254 customCtxIndex < customCtxCount;
15257 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15258 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15260 if(!pBlockVectorCtx->hasDefragmentationPlan)
15262 pBlockVectorCtx->GetBlockVector()->Defragment(
15265 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15266 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
15269 if(pBlockVectorCtx->res < VK_SUCCESS)
15272 pBlockVectorCtx->hasDefragmentationPlan =
true;
15275 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15277 pCurrentMove, movesLeft);
15279 movesLeft -= processed;
15280 pCurrentMove += processed;
15287 VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
15289 VkResult res = VK_SUCCESS;
15292 for(uint32_t memTypeIndex = 0;
15293 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15296 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15297 if(pBlockVectorCtx)
15299 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15301 if(!pBlockVectorCtx->hasDefragmentationPlan)
15303 res = VK_NOT_READY;
15307 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15308 pBlockVectorCtx, m_pStats);
15310 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15311 res = VK_NOT_READY;
15316 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15317 customCtxIndex < customCtxCount;
15320 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15321 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15323 if(!pBlockVectorCtx->hasDefragmentationPlan)
15325 res = VK_NOT_READY;
15329 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15330 pBlockVectorCtx, m_pStats);
15332 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15333 res = VK_NOT_READY;
15342 #if VMA_RECORDING_ENABLED
15344 VmaRecorder::VmaRecorder() :
15348 m_RecordingStartTime(std::chrono::high_resolution_clock::now())
15354 m_UseMutex = useMutex;
15355 m_Flags = settings.
flags;
15357 #if defined(_WIN32)
15359 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
15363 return VK_ERROR_INITIALIZATION_FAILED;
15367 m_File = fopen(settings.
pFilePath,
"wb");
15371 return VK_ERROR_INITIALIZATION_FAILED;
15376 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
15377 fprintf(m_File,
"%s\n",
"1,8");
15382 VmaRecorder::~VmaRecorder()
15384 if(m_File != VMA_NULL)
15390 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
15392 CallParams callParams;
15393 GetBasicParams(callParams);
15395 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15396 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
15400 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
15402 CallParams callParams;
15403 GetBasicParams(callParams);
15405 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15406 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
15412 CallParams callParams;
15413 GetBasicParams(callParams);
15415 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15416 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
15427 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
15429 CallParams callParams;
15430 GetBasicParams(callParams);
15432 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15433 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
15438 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
15439 const VkMemoryRequirements& vkMemReq,
15443 CallParams callParams;
15444 GetBasicParams(callParams);
15446 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15447 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15448 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15450 vkMemReq.alignment,
15451 vkMemReq.memoryTypeBits,
15459 userDataStr.GetString());
15463 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
15464 const VkMemoryRequirements& vkMemReq,
15466 uint64_t allocationCount,
15469 CallParams callParams;
15470 GetBasicParams(callParams);
15472 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15473 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15474 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
15476 vkMemReq.alignment,
15477 vkMemReq.memoryTypeBits,
15484 PrintPointerList(allocationCount, pAllocations);
15485 fprintf(m_File,
",%s\n", userDataStr.GetString());
15489 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
15490 const VkMemoryRequirements& vkMemReq,
15491 bool requiresDedicatedAllocation,
15492 bool prefersDedicatedAllocation,
15496 CallParams callParams;
15497 GetBasicParams(callParams);
15499 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15500 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15501 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15503 vkMemReq.alignment,
15504 vkMemReq.memoryTypeBits,
15505 requiresDedicatedAllocation ? 1 : 0,
15506 prefersDedicatedAllocation ? 1 : 0,
15514 userDataStr.GetString());
15518 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
15519 const VkMemoryRequirements& vkMemReq,
15520 bool requiresDedicatedAllocation,
15521 bool prefersDedicatedAllocation,
15525 CallParams callParams;
15526 GetBasicParams(callParams);
15528 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15529 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15530 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15532 vkMemReq.alignment,
15533 vkMemReq.memoryTypeBits,
15534 requiresDedicatedAllocation ? 1 : 0,
15535 prefersDedicatedAllocation ? 1 : 0,
15543 userDataStr.GetString());
15547 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
15550 CallParams callParams;
15551 GetBasicParams(callParams);
15553 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15554 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
15559 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
15560 uint64_t allocationCount,
15563 CallParams callParams;
15564 GetBasicParams(callParams);
15566 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15567 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
15568 PrintPointerList(allocationCount, pAllocations);
15569 fprintf(m_File,
"\n");
15573 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
15575 const void* pUserData)
15577 CallParams callParams;
15578 GetBasicParams(callParams);
15580 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15581 UserDataString userDataStr(
15584 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15586 userDataStr.GetString());
15590 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
15593 CallParams callParams;
15594 GetBasicParams(callParams);
15596 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15597 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
15602 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
15605 CallParams callParams;
15606 GetBasicParams(callParams);
15608 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15609 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
15614 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
15617 CallParams callParams;
15618 GetBasicParams(callParams);
15620 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15621 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
15626 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
15627 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15629 CallParams callParams;
15630 GetBasicParams(callParams);
15632 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15633 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
15640 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
15641 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15643 CallParams callParams;
15644 GetBasicParams(callParams);
15646 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15647 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
15654 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
15655 const VkBufferCreateInfo& bufCreateInfo,
15659 CallParams callParams;
15660 GetBasicParams(callParams);
15662 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15663 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15664 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15665 bufCreateInfo.flags,
15666 bufCreateInfo.size,
15667 bufCreateInfo.usage,
15668 bufCreateInfo.sharingMode,
15669 allocCreateInfo.
flags,
15670 allocCreateInfo.
usage,
15674 allocCreateInfo.
pool,
15676 userDataStr.GetString());
15680 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
15681 const VkImageCreateInfo& imageCreateInfo,
15685 CallParams callParams;
15686 GetBasicParams(callParams);
15688 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15689 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15690 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15691 imageCreateInfo.flags,
15692 imageCreateInfo.imageType,
15693 imageCreateInfo.format,
15694 imageCreateInfo.extent.width,
15695 imageCreateInfo.extent.height,
15696 imageCreateInfo.extent.depth,
15697 imageCreateInfo.mipLevels,
15698 imageCreateInfo.arrayLayers,
15699 imageCreateInfo.samples,
15700 imageCreateInfo.tiling,
15701 imageCreateInfo.usage,
15702 imageCreateInfo.sharingMode,
15703 imageCreateInfo.initialLayout,
15704 allocCreateInfo.
flags,
15705 allocCreateInfo.
usage,
15709 allocCreateInfo.
pool,
15711 userDataStr.GetString());
15715 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
15718 CallParams callParams;
15719 GetBasicParams(callParams);
15721 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15722 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
15727 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
15730 CallParams callParams;
15731 GetBasicParams(callParams);
15733 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15734 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// NOTE(review): this whole section is a damaged extraction of vk_mem_alloc.h —
// original file line numbers are fused into the text, parameter lists and
// fprintf argument lists are truncated, and braces are missing. Comments below
// describe only what the visible text shows; verify against the upstream file.

// Logs a "vmaTouchAllocation" CSV record (threadId, time, frameIndex, handle)
// to m_File, serialized by m_FileMutex when m_UseMutex is set.
15739 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
15742 CallParams callParams;
15743 GetBasicParams(callParams);
15745 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15746 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a "vmaGetAllocationInfo" CSV record with the same leading fields.
15751 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
15754 CallParams callParams;
15755 GetBasicParams(callParams);
15757 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15758 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a "vmaMakePoolAllocationsLost" CSV record for the given pool handle.
15763 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
15766 CallParams callParams;
15767 GetBasicParams(callParams);
15769 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15770 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a "vmaDefragmentationBegin" record; the three fprintf calls emit the
// header, a pointer list separator, and the defragmentation-info fields.
// NOTE(review): the arguments to the second and third fprintf were dropped
// by the extraction — presumably allocation/pool lists; confirm upstream.
15775 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
15779 CallParams callParams;
15780 GetBasicParams(callParams);
15782 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15783 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
15786 fprintf(m_File,
",");
15788 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Logs a "vmaDefragmentationEnd" record for a defragmentation context handle.
15798 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
15801 CallParams callParams;
15802 GetBasicParams(callParams);
15804 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15805 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a "vmaSetPoolName" record; a null name is written as an empty string.
15810 void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
15814 CallParams callParams;
15815 GetBasicParams(callParams);
15817 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15818 fprintf(m_File,
"%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15819 pool, name != VMA_NULL ? name :
"");
// Fragment of a user-data stringifier: when pUserData is non-null it is either
// used directly as a C string (m_Str) or formatted as a pointer into m_PtrStr
// (16 hex chars + NUL => buffer size 17). The enclosing function signature was
// dropped by the extraction — looks like VmaRecorder::UserDataString::Init.
15825 if(pUserData != VMA_NULL)
15829 m_Str = (
const char*)pUserData;
15834 snprintf(m_PtrStr, 17,
"%p", pUserData);
// Writes the "Config,Begin" ... "Config,End" header section of a recording
// file: Vulkan API version, physical-device identity and limits, memory heaps
// and types, which optional extensions were enabled, and the values of the
// VMA_DEBUG_* compile-time macros. Pure sequential fprintf; no locking here —
// presumably called once during recorder initialization before any other
// thread records (TODO confirm against upstream call site).
15844 void VmaRecorder::WriteConfiguration(
15845 const VkPhysicalDeviceProperties& devProps,
15846 const VkPhysicalDeviceMemoryProperties& memProps,
15847 uint32_t vulkanApiVersion,
15848 bool dedicatedAllocationExtensionEnabled,
15849 bool bindMemory2ExtensionEnabled,
15850 bool memoryBudgetExtensionEnabled,
15851 bool deviceCoherentMemoryExtensionEnabled)
15853 fprintf(m_File,
"Config,Begin\n");
15855 fprintf(m_File,
"VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));
// Physical device identity.
15857 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
15858 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
15859 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
15860 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
15861 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
15862 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits relevant to allocation strategy.
15864 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
15865 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
15866 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps: size and flags per heap.
15868 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
15869 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
15871 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
15872 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Memory types: heap index and property flags per type.
15874 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
15875 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
15877 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
15878 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
// Which optional extensions the allocator was created with (0/1).
15881 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
15882 fprintf(m_File,
"Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
15883 fprintf(m_File,
"Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
15884 fprintf(m_File,
"Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);
// Compile-time configuration macros, so a replay can match the build.
15886 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
15887 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
15888 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
15889 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
15890 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
15891 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
15892 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
15893 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
15894 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
15896 fprintf(m_File,
"Config,End\n");
// Fills outParams with the calling thread's id and a timestamp (seconds since
// m_RecordingStartTime). On Win32 the id comes from GetCurrentThreadId();
// otherwise std::thread::id is round-tripped through a stringstream and
// std::stoi — NOTE(review): stoi can throw/truncate if the platform's thread
// id string is non-numeric or exceeds int range; confirm upstream handling.
15899 void VmaRecorder::GetBasicParams(CallParams& outParams)
15901 #if defined(_WIN32)
15902 outParams.threadId = GetCurrentThreadId();
15907 std::thread::id thread_id = std::this_thread::get_id();
15908 std::stringstream thread_id_to_string_converter;
15909 thread_id_to_string_converter << thread_id;
15910 std::string thread_id_as_string = thread_id_to_string_converter.str();
15911 outParams.threadId =
static_cast<uint32_t
>(std::stoi(thread_id_as_string.c_str()));
15914 auto current_time = std::chrono::high_resolution_clock::now();
15916 outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
// Prints `count` allocation handles to m_File, space-separated (first without
// a leading space). NOTE(review): guard for count == 0 is not visible here —
// the extraction may have dropped it; pItems[0] would be read unconditionally
// as shown.
15919 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
15923 fprintf(m_File,
"%p", pItems[0]);
15924 for(uint64_t i = 1; i < count; ++i)
15926 fprintf(m_File,
" %p", pItems[i]);
// Flush() body was dropped by the extraction — presumably fflush(m_File),
// possibly gated by a flush-after-call setting; verify upstream.
15931 void VmaRecorder::Flush()
// Thread-safe pool allocator for VmaAllocation_T objects. The underlying
// VmaPoolAllocator is created with the caller's VkAllocationCallbacks and a
// first-block capacity of 1024 objects.
15944 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
15945 m_Allocator(pAllocationCallbacks, 1024)
// Allocates one VmaAllocation_T, forwarding constructor arguments; guarded by
// m_Mutex so the pool can be shared across threads.
15949 template<
typename... Types>
VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
15951 VmaMutexLock mutexLock(m_Mutex);
15952 return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
// Returns an allocation object to the pool under the same mutex.
15955 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
15957 VmaMutexLock mutexLock(m_Mutex);
15958 m_Allocator.Free(hAlloc);
// VmaAllocator_T constructor (extraction has dropped the signature line and
// many interior lines — member-initializer list and body are both partial).
// Visible behavior: caches handles/callbacks from pCreateInfo, validates that
// requested features are compiled in, imports Vulkan function pointers,
// queries device properties, applies per-heap size limits, creates one
// VmaBlockVector per memory type, and optionally starts the recorder.
15966 m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
15973 m_hDevice(pCreateInfo->device),
15974 m_hInstance(pCreateInfo->instance),
15975 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
15976 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
15977 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
15978 m_AllocationObjectAllocator(&m_AllocationCallbacks),
15979 m_HeapSizeLimitMask(0),
15980 m_DeviceMemoryCount(0),
15981 m_PreferredLargeHeapBlockSize(0),
15982 m_PhysicalDevice(pCreateInfo->physicalDevice),
15983 m_CurrentFrameIndex(0),
15984 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
15986 m_GlobalMemoryTypeBits(UINT32_MAX)
15988 ,m_pRecorder(VMA_NULL)
// On Vulkan >= 1.1 the KHR extensions are core, so the per-extension flags
// are cleared (the core paths are used instead).
15991 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
15993 m_UseKhrDedicatedAllocation =
false;
15994 m_UseKhrBindMemory2 =
false;
// Corruption detection writes uint32 markers, so the margin must be a
// multiple of 4 bytes.
15997 if(VMA_DEBUG_DETECT_CORRUPTION)
16000 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Compile-time feature gates: assert if the caller requested a feature that
// was disabled by preprocessor macros or an unavailable Vulkan version.
16005 if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
16007 #if !(VMA_DEDICATED_ALLOCATION)
16010 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
16013 #if !(VMA_BIND_MEMORY2)
16016 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
16020 #if !(VMA_MEMORY_BUDGET)
16023 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
16026 #if !(VMA_BUFFER_DEVICE_ADDRESS)
16027 if(m_UseKhrBufferDeviceAddress)
16029 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
16032 #if VMA_VULKAN_VERSION < 1002000
16033 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
16035 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
16038 #if VMA_VULKAN_VERSION < 1001000
16039 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16041 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
16044 #if !(VMA_MEMORY_PRIORITY)
16045 if(m_UseExtMemoryPriority)
16047 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
// Zero all POD members before they are selectively filled in below.
16051 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
16052 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
16053 memset(&m_MemProps, 0,
sizeof(m_MemProps));
16055 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
16056 memset(&m_VulkanFunctions, 0,
sizeof(m_VulkanFunctions));
// Query device properties/memory properties via the imported pointers and
// sanity-check alignment-related values are powers of two.
16067 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
16068 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
16070 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
16071 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
16072 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
16073 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
16078 m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
// Apply optional caller-provided per-heap size limits by shrinking the
// reported heap sizes (guarded elsewhere — pHeapSizeLimit null check was
// dropped by the extraction).
16082 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
16084 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
16085 if(limit != VK_WHOLE_SIZE)
16087 m_HeapSizeLimitMask |= 1u << heapIndex;
16088 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
16090 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default VmaBlockVector per memory type (arguments partially dropped).
16096 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16098 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
16100 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
16104 preferredBlockSize,
16107 GetBufferImageGranularity(),
16119 VkResult res = VK_SUCCESS;
// Optional call recording: create the recorder, write the config header and
// the initial "create allocator" entry; if recording was requested but
// VMA_RECORDING_ENABLED is 0, fail with VK_ERROR_FEATURE_NOT_PRESENT.
16124 #if VMA_RECORDING_ENABLED
16125 m_pRecorder = vma_new(
this, VmaRecorder)();
16127 if(res != VK_SUCCESS)
16131 m_pRecorder->WriteConfiguration(
16132 m_PhysicalDeviceProperties,
16134 m_VulkanApiVersion,
16135 m_UseKhrDedicatedAllocation,
16136 m_UseKhrBindMemory2,
16137 m_UseExtMemoryBudget,
16138 m_UseAmdDeviceCoherentMemory);
16139 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
16141 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
16142 return VK_ERROR_FEATURE_NOT_PRESENT;
// Prime budget data if VK_EXT_memory_budget is in use.
16146 #if VMA_MEMORY_BUDGET
16147 if(m_UseExtMemoryBudget)
16149 UpdateVulkanBudget();
// Destructor: records the destroy event and deletes the recorder (if any),
// asserts that all pools were destroyed, then walks memory types in reverse
// asserting no dedicated allocations were leaked and deleting each block
// vector.
16156 VmaAllocator_T::~VmaAllocator_T()
16158 #if VMA_RECORDING_ENABLED
16159 if(m_pRecorder != VMA_NULL)
16161 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
16162 vma_delete(
this, m_pRecorder);
16166 VMA_ASSERT(m_Pools.IsEmpty());
// Reverse iteration over memory types (memTypeIndex-- idiom).
16168 for(
size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
16170 if(!m_DedicatedAllocations[memTypeIndex].IsEmpty())
16172 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
16175 vma_delete(
this, m_pBlockVectors[memTypeIndex]);
// Populates m_VulkanFunctions in three layers, later layers only filling
// slots still null: (1) statically linked symbols when
// VMA_STATIC_VULKAN_FUNCTIONS == 1, (2) caller-provided pointers, (3)
// vkGet*ProcAddr lookup when VMA_DYNAMIC_VULKAN_FUNCTIONS == 1; finally
// asserts every required pointer is set.
16179 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
16181 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
16182 ImportVulkanFunctions_Static();
16185 if(pVulkanFunctions != VMA_NULL)
16187 ImportVulkanFunctions_Custom(pVulkanFunctions);
16190 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
16191 ImportVulkanFunctions_Dynamic();
16194 ValidateVulkanFunctions();
16197 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
// Copies the statically linked Vulkan 1.0 entry points into
// m_VulkanFunctions; on Vulkan >= 1.1 also copies the core "2" variants into
// the *KHR slots (VMA uses the KHR-named members for both core and extension
// paths).
16199 void VmaAllocator_T::ImportVulkanFunctions_Static()
16202 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
16203 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
16204 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
16205 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
16206 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
16207 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
16208 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
16209 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
16210 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
16211 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
16212 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
16213 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
16214 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
16215 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
16216 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
16217 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
16218 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
16221 #if VMA_VULKAN_VERSION >= 1001000
16222 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16224 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
16225 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
16226 m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
16227 m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
16228 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
// Overlays caller-supplied function pointers onto m_VulkanFunctions; each
// slot is copied only when the caller's pointer is non-null, so previously
// imported (static) pointers survive partial customization.
16235 void VmaAllocator_T::ImportVulkanFunctions_Custom(
const VmaVulkanFunctions* pVulkanFunctions)
16237 VMA_ASSERT(pVulkanFunctions != VMA_NULL);
16239 #define VMA_COPY_IF_NOT_NULL(funcName) \
16240 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
16242 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
16243 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
16244 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
16245 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
16246 VMA_COPY_IF_NOT_NULL(vkMapMemory);
16247 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
16248 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
16249 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
16250 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
16251 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
16252 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
16253 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
16254 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
16255 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
16256 VMA_COPY_IF_NOT_NULL(vkCreateImage);
16257 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
16258 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
// Extension/core-1.1 slots are only compiled in when the corresponding
// feature macro or Vulkan version is available.
16260 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16261 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
16262 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
16265 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16266 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
16267 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
16270 #if VMA_MEMORY_BUDGET
16271 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
16274 #undef VMA_COPY_IF_NOT_NULL
16277 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
// Fills any still-null slots in m_VulkanFunctions by querying the loader:
// instance-level entry points via vkGetInstanceProcAddr(m_hInstance, ...) and
// device-level ones via vkGetDeviceProcAddr(m_hDevice, ...). Core-1.1 "2"
// names are fetched when the API version allows; otherwise the *KHR extension
// names are fetched when the corresponding extension flag is enabled.
16279 void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
16281 #define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
16282 if(m_VulkanFunctions.memberName == VMA_NULL) \
16283 m_VulkanFunctions.memberName = \
16284 (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
16285 #define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
16286 if(m_VulkanFunctions.memberName == VMA_NULL) \
16287 m_VulkanFunctions.memberName = \
16288 (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);
16290 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties,
"vkGetPhysicalDeviceProperties");
16291 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties,
"vkGetPhysicalDeviceMemoryProperties");
16292 VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory,
"vkAllocateMemory");
16293 VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory,
"vkFreeMemory");
16294 VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory,
"vkMapMemory");
16295 VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory,
"vkUnmapMemory");
16296 VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges,
"vkFlushMappedMemoryRanges");
16297 VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges,
"vkInvalidateMappedMemoryRanges");
16298 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory,
"vkBindBufferMemory");
16299 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory,
"vkBindImageMemory");
16300 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements,
"vkGetBufferMemoryRequirements");
16301 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements,
"vkGetImageMemoryRequirements");
16302 VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer,
"vkCreateBuffer");
16303 VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer,
"vkDestroyBuffer");
16304 VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage,
"vkCreateImage");
16305 VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage,
"vkDestroyImage");
16306 VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer,
"vkCmdCopyBuffer");
// Core 1.1 names (no KHR suffix in the queried string).
16308 #if VMA_VULKAN_VERSION >= 1001000
16309 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16311 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2,
"vkGetBufferMemoryRequirements2");
16312 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2,
"vkGetImageMemoryRequirements2");
16313 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2,
"vkBindBufferMemory2");
16314 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2,
"vkBindImageMemory2");
16315 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2,
"vkGetPhysicalDeviceMemoryProperties2");
// Extension (KHR-suffixed) names, fetched only when the extension is enabled.
16319 #if VMA_DEDICATED_ALLOCATION
16320 if(m_UseKhrDedicatedAllocation)
16322 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR,
"vkGetBufferMemoryRequirements2KHR");
16323 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR,
"vkGetImageMemoryRequirements2KHR");
16327 #if VMA_BIND_MEMORY2
16328 if(m_UseKhrBindMemory2)
16330 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR,
"vkBindBufferMemory2KHR");
16331 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR,
"vkBindImageMemory2KHR");
16335 #if VMA_MEMORY_BUDGET
16336 if(m_UseExtMemoryBudget)
16338 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR,
"vkGetPhysicalDeviceMemoryProperties2KHR");
16342 #undef VMA_FETCH_DEVICE_FUNC
16343 #undef VMA_FETCH_INSTANCE_FUNC
// Asserts that every Vulkan function pointer the allocator can call is
// non-null: all core 1.0 entry points unconditionally, plus the "2"/KHR
// variants whenever the API version or the corresponding enabled extension
// means they may be used. Debug-build safety net after the three-stage import.
16348 void VmaAllocator_T::ValidateVulkanFunctions()
16350 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
16351 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
16352 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
16353 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
16354 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
16355 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
16356 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
16357 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
16358 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
16359 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
16360 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
16361 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
16362 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
16363 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
16364 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
16365 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
16366 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
16368 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16369 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
16371 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
16372 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
16376 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16377 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
16379 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
16380 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
16384 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
16385 if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16387 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
// Returns the preferred VkDeviceMemory block size for a memory type: 1/8 of
// the owning heap for "small" heaps (<= VMA_SMALL_HEAP_MAX_SIZE), otherwise
// m_PreferredLargeHeapBlockSize; rounded up to a multiple of 32 bytes.
16392 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
16394 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16395 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
16396 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
16397 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
// Allocates `allocationCount` allocations from one specific memory type:
// first prefers dedicated memory when requested/forced or when the size
// exceeds half the preferred block size, otherwise tries the type's block
// vector, then falls back to dedicated memory unless the allocation budget
// (3/4 of maxMemoryAllocationCount) is already exceeded. NOTE(review): the
// extraction dropped several parameters, flag checks, and argument lists
// here; the visible control flow is only an outline.
16400 VkResult VmaAllocator_T::AllocateMemoryOfType(
16402 VkDeviceSize alignment,
16403 bool dedicatedAllocation,
16404 VkBuffer dedicatedBuffer,
16405 VkBufferUsageFlags dedicatedBufferUsage,
16406 VkImage dedicatedImage,
16408 uint32_t memTypeIndex,
16409 VmaSuballocationType suballocType,
16410 size_t allocationCount,
16413 VMA_ASSERT(pAllocations != VMA_NULL);
16414 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
16420 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16430 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
16431 VMA_ASSERT(blockVector);
// Prefer dedicated memory for forced-dedicated builds, explicit requests,
// or allocations larger than half a block.
16433 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
16434 bool preferDedicatedMemory =
16435 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
16436 dedicatedAllocation ||
16438 size > preferredBlockSize / 2;
16440 if(preferDedicatedMemory &&
16442 finalCreateInfo.
pool == VK_NULL_HANDLE)
16451 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16455 return AllocateDedicatedMemory(
16465 dedicatedBufferUsage,
// Primary path: suballocate from the type's block vector.
16473 VkResult res = blockVector->Allocate(
16474 m_CurrentFrameIndex.load(),
16481 if(res == VK_SUCCESS)
16489 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback to dedicated memory, but keep 1/4 of the device's allocation
// count budget in reserve for block allocations.
16495 if(m_DeviceMemoryCount.load() > m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount * 3 / 4)
16497 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16500 res = AllocateDedicatedMemory(
16510 dedicatedBufferUsage,
16514 if(res == VK_SUCCESS)
16517 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
16523 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates `allocationCount` dedicated VkDeviceMemory blocks of `size` from
// `memTypeIndex`: checks the heap budget, builds a VkMemoryAllocateInfo with
// optional pNext chain entries (dedicated buffer/image info, buffer device
// address flags, memory priority), allocates page by page, and on success
// registers all allocations in the per-type dedicated-allocation list. On
// partial failure it rolls back every page already allocated. NOTE(review):
// extraction dropped parameters, the `withinBudget` guard, and several
// argument lists; rollback loop locals (currAlloc) appear without their
// declarations.
16529 VkResult VmaAllocator_T::AllocateDedicatedMemory(
16531 VmaSuballocationType suballocType,
16532 uint32_t memTypeIndex,
16535 bool isUserDataString,
16538 VkBuffer dedicatedBuffer,
16539 VkBufferUsageFlags dedicatedBufferUsage,
16540 VkImage dedicatedImage,
16541 size_t allocationCount,
16544 VMA_ASSERT(allocationCount > 0 && pAllocations);
// Budget check: refuse if usage + requested bytes would exceed the heap's
// budget (visible only partially).
16548 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16550 GetBudget(&heapBudget, heapIndex, 1);
16551 if(heapBudget.
usage + size * allocationCount > heapBudget.
budget)
16553 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16557 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
16558 allocInfo.memoryTypeIndex = memTypeIndex;
16559 allocInfo.allocationSize = size;
// VK_KHR_dedicated_allocation (or core 1.1): attach the buffer/image this
// memory is dedicated to; buffer and image are mutually exclusive.
16561 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16562 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
16563 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16565 if(dedicatedBuffer != VK_NULL_HANDLE)
16567 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE)
16568 dedicatedAllocInfo.buffer = dedicatedBuffer;
16569 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
16571 else if(dedicatedImage != VK_NULL_HANDLE)
16573 dedicatedAllocInfo.image = dedicatedImage;
16574 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
// VK_KHR_buffer_device_address: add DEVICE_ADDRESS flag unless the dedicated
// resource provably cannot contain a device-address buffer (an image, or a
// buffer without the usage bit). UINT32_MAX usage means "unknown" => allow.
16579 #if VMA_BUFFER_DEVICE_ADDRESS
16580 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
16581 if(m_UseKhrBufferDeviceAddress)
16583 bool canContainBufferWithDeviceAddress =
true;
16584 if(dedicatedBuffer != VK_NULL_HANDLE)
16586 canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX ||
16587 (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
16589 else if(dedicatedImage != VK_NULL_HANDLE)
16591 canContainBufferWithDeviceAddress =
false;
16593 if(canContainBufferWithDeviceAddress)
16595 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
16596 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
// VK_EXT_memory_priority: attach the caller-requested priority.
16601 #if VMA_MEMORY_PRIORITY
16602 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
16603 if(m_UseExtMemoryPriority)
16605 priorityInfo.priority = priority;
16606 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
// Allocate each page; stop at the first failure.
16611 VkResult res = VK_SUCCESS;
16612 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16614 res = AllocateDedicatedMemoryPage(
16622 pAllocations + allocIndex);
16623 if(res != VK_SUCCESS)
// Success: register all new allocations under the per-type write lock.
16629 if(res == VK_SUCCESS)
16633 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
16634 DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
16635 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16637 dedicatedAllocations.PushBack(pAllocations[allocIndex]);
16641 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: free every page allocated so far in reverse order and zero the
// output array so the caller sees no partial results.
16646 while(allocIndex--)
16649 VkDeviceMemory hMemory = currAlloc->GetMemory();
16661 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
16662 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
16663 currAlloc->SetUserData(
this, VMA_NULL);
16664 m_AllocationObjectAllocator.Free(currAlloc);
16667 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates one dedicated VkDeviceMemory, optionally maps it (mapping branch
// condition dropped by extraction), then constructs the VmaAllocation_T
// object, records it in the budget, and optionally fills the memory with the
// debug "created" pattern. On vkMapMemory failure the fresh memory is freed
// before returning.
16673 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
16675 VmaSuballocationType suballocType,
16676 uint32_t memTypeIndex,
16677 const VkMemoryAllocateInfo& allocInfo,
16679 bool isUserDataString,
16683 VkDeviceMemory hMemory = VK_NULL_HANDLE;
16684 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
16687 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent mapping (guarding condition not visible here).
16691 void* pMappedData = VMA_NULL;
16694 res = (*m_VulkanFunctions.vkMapMemory)(
16703 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
16704 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Wrap the raw memory in a VmaAllocation_T and account for it.
16709 *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
16710 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
16711 (*pAllocation)->SetUserData(
this, pUserData);
16712 m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
16713 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
16715 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. When VK_KHR_dedicated_allocation
// (or core 1.1) is usable, uses the "2" query with a
// VkMemoryDedicatedRequirementsKHR chained in to also report whether a
// dedicated allocation is required/preferred; otherwise falls back to the
// plain query and reports false for both flags.
16721 void VmaAllocator_T::GetBufferMemoryRequirements(
16723 VkMemoryRequirements& memReq,
16724 bool& requiresDedicatedAllocation,
16725 bool& prefersDedicatedAllocation)
const
16727 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16728 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16730 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
16731 memReqInfo.buffer = hBuffer;
16733 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16735 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16736 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16738 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16740 memReq = memReq2.memoryRequirements;
16741 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16742 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
16747 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
16748 requiresDedicatedAllocation =
false;
16749 prefersDedicatedAllocation =
false;
// Image counterpart of the function above: identical structure, using the
// image-specific "2" query when available.
16753 void VmaAllocator_T::GetImageMemoryRequirements(
16755 VkMemoryRequirements& memReq,
16756 bool& requiresDedicatedAllocation,
16757 bool& prefersDedicatedAllocation)
const
16759 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16760 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16762 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
16763 memReqInfo.image = hImage;
16765 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16767 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16768 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16770 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16772 memReq = memReq2.memoryRequirements;
16773 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16774 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
16779 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
16780 requiresDedicatedAllocation =
false;
16781 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates flag combinations, routes pool
// allocations directly to the pool's block vector, and otherwise iterates
// candidate memory types (best first, masking out each failed type) calling
// AllocateMemoryOfType until one succeeds. NOTE(review): the extraction
// dropped several createInfo flag tests and the FindMemoryTypeIndex call the
// visible `res`/`memTypeIndex` values come from; treat the control flow below
// as an outline only.
16785 VkResult VmaAllocator_T::AllocateMemory(
16786 const VkMemoryRequirements& vkMemReq,
16787 bool requiresDedicatedAllocation,
16788 bool prefersDedicatedAllocation,
16789 VkBuffer dedicatedBuffer,
16790 VkBufferUsageFlags dedicatedBufferUsage,
16791 VkImage dedicatedImage,
16793 VmaSuballocationType suballocType,
16794 size_t allocationCount,
// Pre-zero outputs so failure paths leave no stale handles.
16797 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
16799 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
16801 if(vkMemReq.size == 0)
16803 return VK_ERROR_VALIDATION_FAILED_EXT;
// Invalid flag combinations are rejected with asserts + error returns.
16808 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
16809 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16814 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
16815 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16817 if(requiresDedicatedAllocation)
16821 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
16822 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16824 if(createInfo.
pool != VK_NULL_HANDLE)
16826 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
16827 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16830 if((createInfo.
pool != VK_NULL_HANDLE) &&
16833 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
16834 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: allocate from the pool's block vector, honoring the
// pool memory type's minimum alignment.
16837 if(createInfo.
pool != VK_NULL_HANDLE)
16839 const VkDeviceSize alignmentForPool = VMA_MAX(
16840 vkMemReq.alignment,
16841 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
16846 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16851 return createInfo.
pool->m_BlockVector.Allocate(
16852 m_CurrentFrameIndex.load(),
// Default path: pick the best memory type, try it, and on failure mask it
// out of memoryTypeBits and retry with the next candidate.
16863 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
16864 uint32_t memTypeIndex = UINT32_MAX;
16866 if(res == VK_SUCCESS)
16868 VkDeviceSize alignmentForMemType = VMA_MAX(
16869 vkMemReq.alignment,
16870 GetMemoryTypeMinAlignment(memTypeIndex));
16872 res = AllocateMemoryOfType(
16874 alignmentForMemType,
16875 requiresDedicatedAllocation || prefersDedicatedAllocation,
16877 dedicatedBufferUsage,
16885 if(res == VK_SUCCESS)
16895 memoryTypeBits &= ~(1u << memTypeIndex);
16898 if(res == VK_SUCCESS)
16900 alignmentForMemType = VMA_MAX(
16901 vkMemReq.alignment,
16902 GetMemoryTypeMinAlignment(memTypeIndex));
16904 res = AllocateMemoryOfType(
16906 alignmentForMemType,
16907 requiresDedicatedAllocation || prefersDedicatedAllocation,
16909 dedicatedBufferUsage,
16917 if(res == VK_SUCCESS)
16927 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an array of allocations in reverse order. For each live allocation:
// optionally fills memory with the debug "destroyed" pattern, then routes by
// allocation type — block allocations go back to the owning pool's or the
// default per-type block vector, dedicated allocations to
// FreeDedicatedMemory — and finally updates the budget and destroys the
// VmaAllocation_T object. The TouchAllocation call skips lost allocations.
16938 void VmaAllocator_T::FreeMemory(
16939 size_t allocationCount,
16942 VMA_ASSERT(pAllocations);
16944 for(
size_t allocIndex = allocationCount; allocIndex--; )
16948 if(allocation != VK_NULL_HANDLE)
16950 if(TouchAllocation(allocation))
16952 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
16954 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
16957 switch(allocation->GetType())
16959 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
16961 VmaBlockVector* pBlockVector = VMA_NULL;
16962 VmaPool hPool = allocation->GetBlock()->GetParentPool();
16963 if(hPool != VK_NULL_HANDLE)
16965 pBlockVector = &hPool->m_BlockVector;
16969 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
16970 pBlockVector = m_pBlockVectors[memTypeIndex];
16972 pBlockVector->Free(allocation);
16975 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
16976 FreeDedicatedMemory(allocation);
16984 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
16985 allocation->SetUserData(
this, VMA_NULL);
16986 m_AllocationObjectAllocator.Free(allocation);
// Aggregates allocator-wide statistics into pStats: initializes the total and
// per-type/per-heap entries, adds stats from every default block vector, from
// every custom pool (under the pools read lock), and from every dedicated
// allocation (under each type's read lock), then post-processes all entries
// (averages etc.). NOTE(review): the per-type/per-heap InitStatInfo calls and
// the dedicated-list iterator declaration were dropped by the extraction.
16991 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
16994 InitStatInfo(pStats->
total);
16995 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
16997 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
17001 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17003 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
17004 VMA_ASSERT(pBlockVector);
17005 pBlockVector->AddStats(pStats);
// Custom pools.
17010 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
17011 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
17013 pool->m_BlockVector.AddStats(pStats);
// Dedicated allocations, added to total, per-type, and per-heap entries.
17018 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17020 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
17021 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17022 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
17024 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
17027 alloc->DedicatedAllocCalcStatsInfo(allocationStatInfo);
17028 VmaAddStatInfo(pStats->
total, allocationStatInfo);
17029 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
17030 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages and min/max fixups.
17035 VmaPostprocessCalcStatInfo(pStats->
total);
17036 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
17037 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
17038 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
17039 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// VmaAllocator_T::GetBudget: fills heapCount VmaBudget entries starting at
// heap index firstHeap. When VK_EXT_memory_budget data is available and fresh
// (< 30 allocator operations since last fetch) it combines the cached driver
// numbers with locally tracked block bytes; otherwise it refetches, or falls
// back to a heuristic (80% of heap size) without the extension.
// Leading numbers are original-source line numbers; some lines elided.
17042 void VmaAllocator_T::GetBudget(
VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
17044 #if VMA_MEMORY_BUDGET
17045 if(m_UseExtMemoryBudget)
// Cached driver budget considered fresh for up to 30 operations.
17047 if(m_Budget.m_OperationsSinceBudgetFetch < 30)
17049 VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
17050 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
17052 const uint32_t heapIndex = firstHeap + i;
17054 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
// Estimate current usage: driver-reported usage at fetch time plus the
// delta of block bytes allocated since the fetch (clamped at zero below).
17057 if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->
blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
17059 outBudget->
usage = m_Budget.m_VulkanUsage[heapIndex] +
17060 outBudget->
blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
17064 outBudget->
usage = 0;
// Budget never exceeds the physical heap size.
17068 outBudget->
budget = VMA_MIN(
17069 m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
// Stale cache: refetch from the driver, then recurse once with fresh data.
17074 UpdateVulkanBudget();
17075 GetBudget(outBudget, firstHeap, heapCount);
// Fallback path (extension unavailable): report tracked block bytes and
// estimate the budget as 8/10 of the heap size.
17081 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
17083 const uint32_t heapIndex = firstHeap + i;
17085 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
17089 outBudget->
budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
// PCI vendor ID for AMD (0x1002 == 4098), used for vendor-specific behavior.
17094 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Defragmentation entry points. Leading numbers are original-source line
// numbers; parameter lists and closing braces were elided by the extraction.
//
// DefragmentationBegin: creates a VmaDefragmentationContext_T, registers the
// allocations to move, runs Defragment(); the context is kept alive only when
// the result is VK_NOT_READY (incremental defragmentation in progress).
17096 VkResult VmaAllocator_T::DefragmentationBegin(
17106 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
17107 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
17110 (*pContext)->AddAllocations(
17113 VkResult res = (*pContext)->Defragment(
// Anything other than VK_NOT_READY means the operation finished (or failed):
// destroy the context immediately.
17118 if(res != VK_NOT_READY)
17120 vma_delete(
this, *pContext);
17121 *pContext = VMA_NULL;
// DefragmentationEnd: destroys a context returned as VK_NOT_READY above.
17127 VkResult VmaAllocator_T::DefragmentationEnd(
17130 vma_delete(
this, context);
// Pass begin/end simply forward to the context object.
17134 VkResult VmaAllocator_T::DefragmentationPassBegin(
17138 return context->DefragmentPassBegin(pInfo);
17140 VkResult VmaAllocator_T::DefragmentationPassEnd(
17143 return context->DefragmentPassEnd();
// Body of VmaAllocator_T::GetAllocationInfo (function header elided by the
// extraction) followed by TouchAllocation. Both handle "lost-able"
// allocations with a lock-free CAS loop on the last-use frame index.
//
// GetAllocationInfo for a lost-able allocation: if it is already lost, report
// null-ish info (offset 0, original size kept); if it was touched this frame,
// report real info; otherwise try to bump the last-use frame index via CAS
// and retry (retry loop structure partially elided).
17149 if(hAllocation->CanBecomeLost())
17155 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17156 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17159 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Lost allocation: only size and user data remain meaningful.
17163 pAllocationInfo->
offset = 0;
17164 pAllocationInfo->
size = hAllocation->GetSize();
17166 pAllocationInfo->
pUserData = hAllocation->GetUserData();
17169 else if(localLastUseFrameIndex == localCurrFrameIndex)
17171 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
17172 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
17173 pAllocationInfo->
offset = hAllocation->GetOffset();
17174 pAllocationInfo->
size = hAllocation->GetSize();
17176 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Mark the allocation as used in the current frame; on CAS failure another
// thread updated it concurrently and the loop re-reads the index.
17181 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17183 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-able path: under VMA_STATS_STRING_ENABLED the last-use frame index
// is still maintained for statistics.
17190 #if VMA_STATS_STRING_ENABLED
17191 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17192 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17195 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
17196 if(localLastUseFrameIndex == localCurrFrameIndex)
17202 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17204 localLastUseFrameIndex = localCurrFrameIndex;
// Fill full allocation info for the non-lost case.
17210 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
17211 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
17212 pAllocationInfo->
offset = hAllocation->GetOffset();
17213 pAllocationInfo->
size = hAllocation->GetSize();
17214 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
17215 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// TouchAllocation: same CAS scheme as above, but only returns whether the
// allocation is still valid (return statements elided by the extraction).
17219 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
17222 if(hAllocation->CanBecomeLost())
17224 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17225 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17228 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
17232 else if(localLastUseFrameIndex == localCurrFrameIndex)
17238 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17240 localLastUseFrameIndex = localCurrFrameIndex;
17247 #if VMA_STATS_STRING_ENABLED
17248 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17249 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17252 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
17253 if(localLastUseFrameIndex == localCurrFrameIndex)
17259 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17261 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (function header and create-info
// validation lines elided by the extraction). Validates the requested memory
// type, constructs a VmaPool_T, pre-creates its minimum block count, assigns
// a pool id, and registers the pool in m_Pools under a write lock.
17273 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
// Elided validation above failed — reject the create info.
17283 return VK_ERROR_INITIALIZATION_FAILED;
// Memory type excluded by the allocator's global memory-type mask.
17287 ((1u << pCreateInfo->
memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
17289 return VK_ERROR_FEATURE_NOT_PRESENT;
17292 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
17294 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Pre-allocate minBlockCount blocks; on failure destroy the half-built pool.
17296 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
17297 if(res != VK_SUCCESS)
17299 vma_delete(
this, *pPool);
// Register the pool: assign a unique id and append to the pool list.
17306 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
17307 (*pPool)->SetId(m_NextPoolId++);
17308 m_Pools.PushBack(*pPool);
// Pool management helpers. Leading numbers are original-source line numbers;
// parameter lists and braces partially elided by the extraction.
//
// DestroyPool: unregister from m_Pools under a write lock, then delete.
17314 void VmaAllocator_T::DestroyPool(
VmaPool pool)
17318 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
17319 m_Pools.Remove(pool);
17322 vma_delete(
this, pool);
// GetPoolStats (header elided): forwards to the pool's block vector.
17327 pool->m_BlockVector.GetPoolStats(pPoolStats);
// SetCurrentFrameIndex: stores the frame index; with VK_EXT_memory_budget in
// use this is also the point where the cached budget is refreshed.
17330 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
17332 m_CurrentFrameIndex.store(frameIndex);
17334 #if VMA_MEMORY_BUDGET
17335 if(m_UseExtMemoryBudget)
17337 UpdateVulkanBudget();
// MakePoolAllocationsLost: forwards to the pool's block vector with the
// current frame index; *pLostAllocationCount receives the number lost.
17342 void VmaAllocator_T::MakePoolAllocationsLost(
17344 size_t* pLostAllocationCount)
17346 hPool->m_BlockVector.MakePoolAllocationsLost(
17347 m_CurrentFrameIndex.load(),
17348 pLostAllocationCount);
// CheckPoolCorruption: forwards to the pool's block vector.
17351 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
17353 return hPool->m_BlockVector.CheckCorruption();
// VmaAllocator_T::CheckCorruption: runs corruption checks over the default
// block vectors and all custom pools whose memory type is in memoryTypeBits.
// Returns VK_ERROR_FEATURE_NOT_PRESENT when no checked vector supports
// corruption detection; VK_SUCCESS once at least one check ran clean (other
// switch cases, including the error propagation, elided by the extraction).
17356 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
17358 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default per-memory-type block vectors.
17361 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17363 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
17365 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
17366 VMA_ASSERT(pBlockVector);
17367 VkResult localRes = pBlockVector->CheckCorruption();
17370 case VK_ERROR_FEATURE_NOT_PRESENT:
17373 finalRes = VK_SUCCESS;
// Custom pools (read lock on the pool list).
17383 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
17384 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
17386 if(((1u << pool->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
17388 VkResult localRes = pool->m_BlockVector.CheckCorruption();
17391 case VK_ERROR_FEATURE_NOT_PRESENT:
17394 finalRes = VK_SUCCESS;
// VmaAllocator_T::CreateLostAllocation: creates an allocation object that is
// permanently in the "lost" state (frame index VMA_FRAME_INDEX_LOST).
17406 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
17408 *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST,
false);
17409 (*pAllocation)->InitLost();
// AtomicTransactionalIncrement<T>: RAII helper that increments an atomic
// counter and, unless Commit() is called (clearing m_Atomic), decrements it
// again in the destructor — i.e. the increment "rolls back" on early return.
// The destructor body and the Commit() declaration were elided by the
// extraction; the m_Atomic = nullptr line below appears to belong to Commit().
17413 template<
typename T>
17414 struct AtomicTransactionalIncrement
17417 typedef std::atomic<T> AtomicT;
17418 ~AtomicTransactionalIncrement()
// Increment: records the target atomic and returns its previous value.
17423 T Increment(AtomicT* atomic)
17426 return m_Atomic->fetch_add(1);
// Commit (header elided): forget the atomic so the destructor does nothing.
17430 m_Atomic =
nullptr;
17434 AtomicT* m_Atomic =
nullptr;
// VmaAllocator_T::AllocateVulkanMemory: wraps vkAllocateMemory with
// (1) an optional cap on total device memory allocations,
// (2) a user-defined per-heap size limit enforced via CAS on budget counters,
// (3) budget bookkeeping and the user's pfnAllocate callback.
// Leading numbers are original-source line numbers; some lines elided.
17437 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
// Transactional increment: rolled back automatically if we return early,
// committed only after a successful vkAllocateMemory.
17439 AtomicTransactionalIncrement<uint32_t> deviceMemoryCountIncrement;
17440 const uint64_t prevDeviceMemoryCount = deviceMemoryCountIncrement.Increment(&m_DeviceMemoryCount);
17441 #if VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
17442 if(prevDeviceMemoryCount >= m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount)
17444 return VK_ERROR_TOO_MANY_OBJECTS;
17448 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Heap with a user-configured size limit: reserve the bytes with a CAS loop
// so concurrent allocations cannot oversubscribe the heap.
17451 if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
17453 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
17454 VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
17457 const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
17458 if(blockBytesAfterAllocation > heapSize)
17460 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// On CAS failure another thread changed the counter; the (elided) loop
// structure re-reads and retries.
17462 if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
// Unlimited heap: plain atomic add suffices.
17470 m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
// The actual Vulkan call.
17474 VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
17476 if(res == VK_SUCCESS)
17478 #if VMA_MEMORY_BUDGET
17479 ++m_Budget.m_OperationsSinceBudgetFetch;
// Inform the user's device-memory callback, if installed.
17483 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
17485 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.
pUserData);
17488 deviceMemoryCountIncrement.Commit();
// Failure: undo the budget reservation (the count increment rolls back via
// the destructor of deviceMemoryCountIncrement).
17492 m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
// VmaAllocator_T::FreeVulkanMemory: counterpart of AllocateVulkanMemory.
// Notifies the user's pfnFree callback (before the actual free), calls
// vkFreeMemory, and reverses the budget/count bookkeeping.
17498 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
17501 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
17503 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.
pUserData);
17507 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
// Undo the bookkeeping done at allocation time.
17509 m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
17511 --m_DeviceMemoryCount;
// BindVulkanBuffer / BindVulkanImage: bind a buffer/image to device memory.
// When a pNext chain is supplied, the *2KHR entry points (core 1.1 or
// VK_KHR_bind_memory2) are required — otherwise the chain cannot be passed
// and VK_ERROR_EXTENSION_NOT_PRESENT is returned. Without pNext the plain
// vkBindBufferMemory/vkBindImageMemory is used.
17514 VkResult VmaAllocator_T::BindVulkanBuffer(
17515 VkDeviceMemory memory,
17516 VkDeviceSize memoryOffset,
17520 if(pNext != VMA_NULL)
17522 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17523 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17524 m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
17526 VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
17527 bindBufferMemoryInfo.pNext = pNext;
17528 bindBufferMemoryInfo.buffer = buffer;
17529 bindBufferMemoryInfo.memory = memory;
17530 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17531 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
// pNext chain requested but bind_memory2 unavailable.
17536 return VK_ERROR_EXTENSION_NOT_PRESENT;
17541 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
// Image variant — same structure as the buffer path above.
// NOTE(review): the local is (mis)named bindBufferMemoryInfo in the original;
// left as-is since this is a documentation-only pass.
17545 VkResult VmaAllocator_T::BindVulkanImage(
17546 VkDeviceMemory memory,
17547 VkDeviceSize memoryOffset,
17551 if(pNext != VMA_NULL)
17553 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17554 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17555 m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
17557 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
17558 bindBufferMemoryInfo.pNext = pNext;
17559 bindBufferMemoryInfo.image = image;
17560 bindBufferMemoryInfo.memory = memory;
17561 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17562 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
17567 return VK_ERROR_EXTENSION_NOT_PRESENT;
17572 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
// Map / Unmap: map an allocation's memory into host address space.
// Leading numbers are original-source line numbers; Unmap's header was elided.
//
// Map: lost-able allocations cannot be mapped. For a block suballocation the
// whole block is mapped (reference counted via pBlock->Map) and *ppData is
// offset to this allocation; dedicated allocations map directly.
17576 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
17578 if(hAllocation->CanBecomeLost())
17580 return VK_ERROR_MEMORY_MAP_FAILED;
17583 switch(hAllocation->GetType())
17585 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17587 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17588 char *pBytes = VMA_NULL;
17589 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
17590 if(res == VK_SUCCESS)
// Adjust the block's base mapping pointer by this allocation's offset and
// record the map on the allocation itself.
17592 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
17593 hAllocation->BlockAllocMap();
17597 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17598 return hAllocation->DedicatedAllocMap(
this, ppData);
17601 return VK_ERROR_MEMORY_MAP_FAILED;
// Unmap body (header elided): mirrors Map — decrement the allocation's map
// count, then release one reference on the block mapping / dedicated memory.
17607 switch(hAllocation->GetType())
17609 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17611 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17612 hAllocation->BlockAllocUnmap();
17613 pBlock->Unmap(
this, 1);
17616 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17617 hAllocation->DedicatedAllocUnmap(
this);
// BindBufferMemory / BindImageMemory: public bind paths that dispatch on the
// allocation type. Dedicated allocations bind straight to their own
// VkDeviceMemory; block suballocations go through the owning block so the
// block-relative offset is applied (and, presumably, the block's mapping
// mutex is honored — confirm in VmaDeviceMemoryBlock::Bind*Memory).
17624 VkResult VmaAllocator_T::BindBufferMemory(
17626 VkDeviceSize allocationLocalOffset,
17630 VkResult res = VK_SUCCESS;
17631 switch(hAllocation->GetType())
17633 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17634 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
17636 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17638 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17639 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
17640 res = pBlock->BindBufferMemory(
this, hAllocation, allocationLocalOffset, hBuffer, pNext);
// Image variant — parallel structure to the buffer path above.
17649 VkResult VmaAllocator_T::BindImageMemory(
17651 VkDeviceSize allocationLocalOffset,
17655 VkResult res = VK_SUCCESS;
17656 switch(hAllocation->GetType())
17658 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17659 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
17661 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17663 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
17664 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
17665 res = pBlock->BindImageMemory(
this, hAllocation, allocationLocalOffset, hImage, pNext);
// FlushOrInvalidateAllocation(s): flush or invalidate mapped memory ranges
// for one or many allocations. GetFlushOrInvalidateRange returns false when
// no call is needed (e.g. host-coherent memory), in which case the range is
// skipped and VK_SUCCESS stands.
17674 VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
17676 VkDeviceSize offset, VkDeviceSize size,
17677 VMA_CACHE_OPERATION op)
17679 VkResult res = VK_SUCCESS;
17681 VkMappedMemoryRange memRange = {};
17682 if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
17686 case VMA_CACHE_FLUSH:
17687 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
17689 case VMA_CACHE_INVALIDATE:
17690 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Batch variant: collects the needed ranges into a small vector, then issues
// a single vkFlush/vkInvalidate call for all of them. offsets/sizes may be
// null, defaulting to 0 / VK_WHOLE_SIZE per allocation.
17700 VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
17701 uint32_t allocationCount,
17703 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
17704 VMA_CACHE_OPERATION op)
17706 typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
17707 typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
17708 RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
17710 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
17713 const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
17714 const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
17715 VkMappedMemoryRange newRange;
17716 if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
17718 ranges.push_back(newRange);
17722 VkResult res = VK_SUCCESS;
17723 if(!ranges.empty())
17727 case VMA_CACHE_FLUSH:
17728 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
17730 case VMA_CACHE_INVALIDATE:
17731 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
// VmaAllocator_T::FreeDedicatedMemory: removes a dedicated allocation from the
// per-memory-type linked list (under its write mutex) and releases its
// VkDeviceMemory via FreeVulkanMemory. Lines 17753-17763 (elided) presumably
// handled unmapping before the free — TODO confirm against full source.
17741 void VmaAllocator_T::FreeDedicatedMemory(
const VmaAllocation allocation)
17743 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
17745 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17747 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17748 DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
17749 dedicatedAllocations.Remove(allocation);
17752 VkDeviceMemory hMemory = allocation->GetMemory();
17764 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
17766 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// CalculateGpuDefragmentationMemoryTypeBits: determines which memory types can
// back the buffer used for GPU-side defragmentation, by creating a dummy
// buffer, querying its memory requirements, and destroying it. Returns 0 if
// the dummy buffer cannot be created.
17769 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const
17771 VkBufferCreateInfo dummyBufCreateInfo;
17772 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
17774 uint32_t memoryTypeBits = 0;
17777 VkBuffer buf = VK_NULL_HANDLE;
17778 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
17779 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
17780 if(res == VK_SUCCESS)
17783 VkMemoryRequirements memReq;
17784 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
17785 memoryTypeBits = memReq.memoryTypeBits;
// Dummy buffer is only needed for the query — destroy immediately.
17788 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
17791 return memoryTypeBits;
// CalculateGlobalMemoryTypeBits: mask of memory types the allocator may use.
// Unless AMD device-coherent memory was explicitly enabled, memory types with
// VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD are excluded.
17794 uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits()
const
17797 VMA_ASSERT(GetMemoryTypeCount() > 0);
17799 uint32_t memoryTypeBits = UINT32_MAX;
17801 if(!m_UseAmdDeviceCoherentMemory)
17804 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17806 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
17808 memoryTypeBits &= ~(1u << memTypeIndex);
17813 return memoryTypeBits;
// GetFlushOrInvalidateRange: computes the VkMappedMemoryRange needed to flush
// or invalidate [offset, offset+size) of the allocation, aligned to
// nonCoherentAtomSize as the Vulkan spec requires. Returns false (elided
// return path) when no call is needed — e.g. size == 0 or the memory type is
// host-coherent.
17816 bool VmaAllocator_T::GetFlushOrInvalidateRange(
17818 VkDeviceSize offset, VkDeviceSize size,
17819 VkMappedMemoryRange& outRange)
const
17821 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17822 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
17824 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
17825 const VkDeviceSize allocationSize = allocation->GetSize();
17826 VMA_ASSERT(offset <= allocationSize);
17828 outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
17829 outRange.pNext = VMA_NULL;
17830 outRange.memory = allocation->GetMemory();
17832 switch(allocation->GetType())
// Dedicated allocation: range is relative to the whole VkDeviceMemory.
17834 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17835 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
17836 if(size == VK_WHOLE_SIZE)
17838 outRange.size = allocationSize - outRange.offset;
17842 VMA_ASSERT(offset + size <= allocationSize);
// Grow the size by the amount offset was aligned down, then align up,
// clamped so the range stays inside the allocation.
17843 outRange.size = VMA_MIN(
17844 VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
17845 allocationSize - outRange.offset);
// Block suballocation: compute an allocation-relative range first, then
// translate by the allocation's offset within the block and clamp to the
// block size.
17848 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17851 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
17852 if(size == VK_WHOLE_SIZE)
17854 size = allocationSize - offset;
17858 VMA_ASSERT(offset + size <= allocationSize);
17860 outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);
17863 const VkDeviceSize allocationOffset = allocation->GetOffset();
// Suballocation offsets are expected to already be atom-size aligned.
17864 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
17865 const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
17866 outRange.offset += allocationOffset;
17867 outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
17879 #if VMA_MEMORY_BUDGET
// UpdateVulkanBudget: refetches per-heap usage/budget from the driver via
// VK_EXT_memory_budget (vkGetPhysicalDeviceMemoryProperties2KHR with a
// VkPhysicalDeviceMemoryBudgetPropertiesEXT chained in), snapshots the
// locally tracked block bytes, sanitizes the values, and resets the
// staleness counter. Requires m_UseExtMemoryBudget.
17881 void VmaAllocator_T::UpdateVulkanBudget()
17883 VMA_ASSERT(m_UseExtMemoryBudget);
17885 VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };
17887 VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
17888 VmaPnextChainPushFront(&memProps, &budgetProps);
17890 GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
17893 VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
17895 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
17897 m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
17898 m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
17899 m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
// Defensive fixups for driver-reported values: zero budget -> assume 80% of
// heap size; budget above heap size -> clamp; zero usage despite tracked
// block bytes -> substitute our own count.
17902 if(m_Budget.m_VulkanBudget[heapIndex] == 0)
17904 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
17906 else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
17908 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
17910 if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
17912 m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
17915 m_Budget.m_OperationsSinceBudgetFetch = 0;
// FillAllocation: debug helper that memsets the allocation's memory with a
// given byte pattern (created/destroyed markers). Only active when
// VMA_DEBUG_INITIALIZE_ALLOCATIONS is on, the allocation cannot become lost,
// and the memory type is host-visible. Maps, fills, flushes, unmaps.
17921 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
17923 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
17924 !hAllocation->CanBecomeLost() &&
17925 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
17927 void* pData = VMA_NULL;
17928 VkResult res = Map(hAllocation, &pData);
17929 if(res == VK_SUCCESS)
17931 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
// Flush so the pattern is visible even on non-coherent memory.
17932 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
17933 Unmap(hAllocation);
17937 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// GetGpuDefragmentationMemoryTypeBits: lazily computes and caches the memory
// type mask used for GPU defragmentation (UINT32_MAX acts as "not yet
// computed"; a benign race may compute it twice with the same result).
17942 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
17944 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
17945 if(memoryTypeBits == UINT32_MAX)
17947 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
17948 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
17950 return memoryTypeBits;
17953 #if VMA_STATS_STRING_ENABLED
// PrintDetailedMap: emits three optional JSON sections into the writer —
// "DedicatedAllocations" (per memory type), "DefaultPools" (non-empty default
// block vectors), and "Pools" (custom pools keyed by pool id). The
// Begin/End pairing relies on elided lines for some EndObject calls.
17955 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
17957 bool dedicatedAllocationsStarted =
false;
17958 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17960 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17961 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
17962 if(!dedicatedAllocList.IsEmpty())
// Open the "DedicatedAllocations" object lazily, on first non-empty list.
17964 if(dedicatedAllocationsStarted ==
false)
17966 dedicatedAllocationsStarted =
true;
17967 json.WriteString(
"DedicatedAllocations");
17968 json.BeginObject();
17971 json.BeginString(
"Type ");
17972 json.ContinueString(memTypeIndex);
17978 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
17980 json.BeginObject(
true);
17981 alloc->PrintParameters(json);
17988 if(dedicatedAllocationsStarted)
// "DefaultPools": one entry per memory type with a non-empty block vector.
17994 bool allocationsStarted =
false;
17995 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17997 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
17999 if(allocationsStarted ==
false)
18001 allocationsStarted =
true;
18002 json.WriteString(
"DefaultPools");
18003 json.BeginObject();
18006 json.BeginString(
"Type ");
18007 json.ContinueString(memTypeIndex);
18010 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
18013 if(allocationsStarted)
// "Pools": custom pools, keyed by their numeric id, under a read lock.
18021 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
18022 if(!m_Pools.IsEmpty())
18024 json.WriteString(
"Pools");
18025 json.BeginObject();
18026 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
18028 json.BeginString();
18029 json.ContinueString(pool->GetId());
18032 pool->m_BlockVector.PrintDetailedMap(json);
// Public C API wrappers. Function headers (VMA_CALL_PRE ... VMA_CALL_POST
// signatures) were elided by the extraction; each body below validates its
// arguments and forwards to the VmaAllocator_T member, taking the optional
// global debug mutex where mutation is involved.
//
// vmaCreateAllocator body: allocates the VmaAllocator_T and runs Init.
18048 VMA_ASSERT(pCreateInfo && pAllocator);
18051 VMA_DEBUG_LOG(
"vmaCreateAllocator");
18053 return (*pAllocator)->Init(pCreateInfo);
// vmaDestroyAllocator body: copies the allocation callbacks out first because
// they live inside the object being deleted.
18059 if(allocator != VK_NULL_HANDLE)
18061 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
18062 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
18063 vma_delete(&allocationCallbacks, allocator);
// vmaGetAllocatorInfo body.
18069 VMA_ASSERT(allocator && pAllocatorInfo);
18070 pAllocatorInfo->
instance = allocator->m_hInstance;
18071 pAllocatorInfo->
physicalDevice = allocator->GetPhysicalDevice();
18072 pAllocatorInfo->
device = allocator->m_hDevice;
// vmaGetPhysicalDeviceProperties: returns a pointer into the allocator —
// valid for the allocator's lifetime.
18077 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
18079 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
18080 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties: same lifetime caveat as above.
18085 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
18087 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
18088 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties: bounds-checked lookup of one type's flags.
18093 uint32_t memoryTypeIndex,
18094 VkMemoryPropertyFlags* pFlags)
18096 VMA_ASSERT(allocator && pFlags);
18097 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
18098 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex: VMA_FRAME_INDEX_LOST is reserved and rejected.
18103 uint32_t frameIndex)
18105 VMA_ASSERT(allocator);
18106 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
18108 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18110 allocator->SetCurrentFrameIndex(frameIndex);
// vmaCalculateStats body.
18117 VMA_ASSERT(allocator && pStats);
18118 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18119 allocator->CalculateStats(pStats);
// vmaGetBudget body: queries all heaps starting at heap 0.
18126 VMA_ASSERT(allocator && pBudget);
18127 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18128 allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
18131 #if VMA_STATS_STRING_ENABLED
// vmaBuildStatsString: serializes allocator-wide statistics into a JSON
// string allocated with the allocator's callbacks. The caller must release it
// with vmaFreeStatsString. Structure: "Total" stats, then one object per
// heap ("Heap N") containing size, flags, budget, optional stats, and nested
// "Type N" objects per memory type; with detailedMap, the full detailed map.
// Signature lines and several closing Begin/End pairs were elided by the
// extraction.
18135 char** ppStatsString,
18136 VkBool32 detailedMap)
18138 VMA_ASSERT(allocator && ppStatsString);
18139 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18141 VmaStringBuilder sb(allocator);
18143 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
18144 json.BeginObject();
// Gather budget (all heaps) and full stats up front.
18147 allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());
18150 allocator->CalculateStats(&stats);
18152 json.WriteString(
"Total");
18153 VmaPrintStatInfo(json, stats.
total);
// One JSON object per memory heap.
18155 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
18157 json.BeginString(
"Heap ");
18158 json.ContinueString(heapIndex);
18160 json.BeginObject();
18162 json.WriteString(
"Size");
18163 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
18165 json.WriteString(
"Flags");
18166 json.BeginArray(
true);
18167 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
18169 json.WriteString(
"DEVICE_LOCAL");
// Per-heap budget numbers gathered above.
18173 json.WriteString(
"Budget");
18174 json.BeginObject();
18176 json.WriteString(
"BlockBytes");
18177 json.WriteNumber(budget[heapIndex].blockBytes);
18178 json.WriteString(
"AllocationBytes");
18179 json.WriteNumber(budget[heapIndex].allocationBytes);
18180 json.WriteString(
"Usage");
18181 json.WriteNumber(budget[heapIndex].usage);
18182 json.WriteString(
"Budget");
18183 json.WriteNumber(budget[heapIndex].budget);
18189 json.WriteString(
"Stats");
18190 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Nested objects for each memory type belonging to this heap.
18193 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
18195 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
18197 json.BeginString(
"Type ");
18198 json.ContinueString(typeIndex);
18201 json.BeginObject();
18203 json.WriteString(
"Flags");
18204 json.BeginArray(
true);
18205 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
18206 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
18208 json.WriteString(
"DEVICE_LOCAL");
18210 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
18212 json.WriteString(
"HOST_VISIBLE");
18214 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
18216 json.WriteString(
"HOST_COHERENT");
18218 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
18220 json.WriteString(
"HOST_CACHED");
18222 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
18224 json.WriteString(
"LAZILY_ALLOCATED");
18226 #if VMA_VULKAN_VERSION >= 1001000
18227 if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
18229 json.WriteString(
"PROTECTED");
18232 #if VK_AMD_device_coherent_memory
18233 if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
18235 json.WriteString(
"DEVICE_COHERENT");
18237 if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
18239 json.WriteString(
"DEVICE_UNCACHED");
18246 json.WriteString(
"Stats");
18247 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
18256 if(detailedMap == VK_TRUE)
18258 allocator->PrintDetailedMap(json);
// Copy the builder's contents into a NUL-terminated heap string owned by the
// caller (freed via vmaFreeStatsString below).
18264 const size_t len = sb.GetLength();
18265 char*
const pChars = vma_new_array(allocator,
char, len + 1);
18268 memcpy(pChars, sb.GetData(), len);
18270 pChars[len] =
'\0';
18271 *ppStatsString = pChars;
// vmaFreeStatsString body: releases a string produced by vmaBuildStatsString;
// a null pointer is a no-op.
18276 char* pStatsString)
18278 if(pStatsString != VMA_NULL)
18280 VMA_ASSERT(allocator);
18281 size_t len = strlen(pStatsString);
18282 vma_delete_array(allocator, pStatsString, len + 1);
18293 uint32_t memoryTypeBits,
18295 uint32_t* pMemoryTypeIndex)
18297 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18298 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18299 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18301 memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();
18308 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
18309 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
18310 uint32_t notPreferredFlags = 0;
18313 switch(pAllocationCreateInfo->
usage)
18318 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18320 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18324 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
18327 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18328 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18330 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18334 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18335 preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
18338 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18341 requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
18350 (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
18352 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
18355 *pMemoryTypeIndex = UINT32_MAX;
18356 uint32_t minCost = UINT32_MAX;
18357 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
18358 memTypeIndex < allocator->GetMemoryTypeCount();
18359 ++memTypeIndex, memTypeBit <<= 1)
18362 if((memTypeBit & memoryTypeBits) != 0)
18364 const VkMemoryPropertyFlags currFlags =
18365 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
18367 if((requiredFlags & ~currFlags) == 0)
18370 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
18371 VmaCountBitsSet(currFlags & notPreferredFlags);
18373 if(currCost < minCost)
18375 *pMemoryTypeIndex = memTypeIndex;
18380 minCost = currCost;
18385 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
18390 const VkBufferCreateInfo* pBufferCreateInfo,
18392 uint32_t* pMemoryTypeIndex)
18394 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18395 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
18396 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18397 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18399 const VkDevice hDev = allocator->m_hDevice;
18400 VkBuffer hBuffer = VK_NULL_HANDLE;
18401 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
18402 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
18403 if(res == VK_SUCCESS)
18405 VkMemoryRequirements memReq = {};
18406 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
18407 hDev, hBuffer, &memReq);
18411 memReq.memoryTypeBits,
18412 pAllocationCreateInfo,
18415 allocator->GetVulkanFunctions().vkDestroyBuffer(
18416 hDev, hBuffer, allocator->GetAllocationCallbacks());
18423 const VkImageCreateInfo* pImageCreateInfo,
18425 uint32_t* pMemoryTypeIndex)
18427 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18428 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
18429 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18430 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18432 const VkDevice hDev = allocator->m_hDevice;
18433 VkImage hImage = VK_NULL_HANDLE;
18434 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
18435 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
18436 if(res == VK_SUCCESS)
18438 VkMemoryRequirements memReq = {};
18439 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
18440 hDev, hImage, &memReq);
18444 memReq.memoryTypeBits,
18445 pAllocationCreateInfo,
18448 allocator->GetVulkanFunctions().vkDestroyImage(
18449 hDev, hImage, allocator->GetAllocationCallbacks());
18459 VMA_ASSERT(allocator && pCreateInfo && pPool);
18461 VMA_DEBUG_LOG(
"vmaCreatePool");
18463 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18465 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
18467 #if VMA_RECORDING_ENABLED
18468 if(allocator->GetRecorder() != VMA_NULL)
18470 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
18481 VMA_ASSERT(allocator);
18483 if(pool == VK_NULL_HANDLE)
18488 VMA_DEBUG_LOG(
"vmaDestroyPool");
18490 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18492 #if VMA_RECORDING_ENABLED
18493 if(allocator->GetRecorder() != VMA_NULL)
18495 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
18499 allocator->DestroyPool(pool);
18507 VMA_ASSERT(allocator && pool && pPoolStats);
18509 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18511 allocator->GetPoolStats(pool, pPoolStats);
18517 size_t* pLostAllocationCount)
18519 VMA_ASSERT(allocator && pool);
18521 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18523 #if VMA_RECORDING_ENABLED
18524 if(allocator->GetRecorder() != VMA_NULL)
18526 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
18530 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
18535 VMA_ASSERT(allocator && pool);
18537 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18539 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
18541 return allocator->CheckPoolCorruption(pool);
18547 const char** ppName)
18549 VMA_ASSERT(allocator && pool && ppName);
18551 VMA_DEBUG_LOG(
"vmaGetPoolName");
18553 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18555 *ppName = pool->GetName();
18563 VMA_ASSERT(allocator && pool);
18565 VMA_DEBUG_LOG(
"vmaSetPoolName");
18567 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18569 pool->SetName(pName);
18571 #if VMA_RECORDING_ENABLED
18572 if(allocator->GetRecorder() != VMA_NULL)
18574 allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
18581 const VkMemoryRequirements* pVkMemoryRequirements,
18586 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
18588 VMA_DEBUG_LOG(
"vmaAllocateMemory");
18590 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18592 VkResult result = allocator->AllocateMemory(
18593 *pVkMemoryRequirements,
18600 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18604 #if VMA_RECORDING_ENABLED
18605 if(allocator->GetRecorder() != VMA_NULL)
18607 allocator->GetRecorder()->RecordAllocateMemory(
18608 allocator->GetCurrentFrameIndex(),
18609 *pVkMemoryRequirements,
18615 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18617 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18625 const VkMemoryRequirements* pVkMemoryRequirements,
18627 size_t allocationCount,
18631 if(allocationCount == 0)
18636 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
18638 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
18640 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18642 VkResult result = allocator->AllocateMemory(
18643 *pVkMemoryRequirements,
18650 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18654 #if VMA_RECORDING_ENABLED
18655 if(allocator->GetRecorder() != VMA_NULL)
18657 allocator->GetRecorder()->RecordAllocateMemoryPages(
18658 allocator->GetCurrentFrameIndex(),
18659 *pVkMemoryRequirements,
18661 (uint64_t)allocationCount,
18666 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18668 for(
size_t i = 0; i < allocationCount; ++i)
18670 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
18684 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18686 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
18688 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18690 VkMemoryRequirements vkMemReq = {};
18691 bool requiresDedicatedAllocation =
false;
18692 bool prefersDedicatedAllocation =
false;
18693 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
18694 requiresDedicatedAllocation,
18695 prefersDedicatedAllocation);
18697 VkResult result = allocator->AllocateMemory(
18699 requiresDedicatedAllocation,
18700 prefersDedicatedAllocation,
18705 VMA_SUBALLOCATION_TYPE_BUFFER,
18709 #if VMA_RECORDING_ENABLED
18710 if(allocator->GetRecorder() != VMA_NULL)
18712 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
18713 allocator->GetCurrentFrameIndex(),
18715 requiresDedicatedAllocation,
18716 prefersDedicatedAllocation,
18722 if(pAllocationInfo && result == VK_SUCCESS)
18724 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18737 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18739 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
18741 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18743 VkMemoryRequirements vkMemReq = {};
18744 bool requiresDedicatedAllocation =
false;
18745 bool prefersDedicatedAllocation =
false;
18746 allocator->GetImageMemoryRequirements(image, vkMemReq,
18747 requiresDedicatedAllocation, prefersDedicatedAllocation);
18749 VkResult result = allocator->AllocateMemory(
18751 requiresDedicatedAllocation,
18752 prefersDedicatedAllocation,
18757 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
18761 #if VMA_RECORDING_ENABLED
18762 if(allocator->GetRecorder() != VMA_NULL)
18764 allocator->GetRecorder()->RecordAllocateMemoryForImage(
18765 allocator->GetCurrentFrameIndex(),
18767 requiresDedicatedAllocation,
18768 prefersDedicatedAllocation,
18774 if(pAllocationInfo && result == VK_SUCCESS)
18776 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18786 VMA_ASSERT(allocator);
18788 if(allocation == VK_NULL_HANDLE)
18793 VMA_DEBUG_LOG(
"vmaFreeMemory");
18795 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18797 #if VMA_RECORDING_ENABLED
18798 if(allocator->GetRecorder() != VMA_NULL)
18800 allocator->GetRecorder()->RecordFreeMemory(
18801 allocator->GetCurrentFrameIndex(),
18806 allocator->FreeMemory(
18813 size_t allocationCount,
18816 if(allocationCount == 0)
18821 VMA_ASSERT(allocator);
18823 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
18825 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18827 #if VMA_RECORDING_ENABLED
18828 if(allocator->GetRecorder() != VMA_NULL)
18830 allocator->GetRecorder()->RecordFreeMemoryPages(
18831 allocator->GetCurrentFrameIndex(),
18832 (uint64_t)allocationCount,
18837 allocator->FreeMemory(allocationCount, pAllocations);
18845 VMA_ASSERT(allocator && allocation && pAllocationInfo);
18847 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18849 #if VMA_RECORDING_ENABLED
18850 if(allocator->GetRecorder() != VMA_NULL)
18852 allocator->GetRecorder()->RecordGetAllocationInfo(
18853 allocator->GetCurrentFrameIndex(),
18858 allocator->GetAllocationInfo(allocation, pAllocationInfo);
18865 VMA_ASSERT(allocator && allocation);
18867 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18869 #if VMA_RECORDING_ENABLED
18870 if(allocator->GetRecorder() != VMA_NULL)
18872 allocator->GetRecorder()->RecordTouchAllocation(
18873 allocator->GetCurrentFrameIndex(),
18878 return allocator->TouchAllocation(allocation);
18886 VMA_ASSERT(allocator && allocation);
18888 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18890 allocation->SetUserData(allocator, pUserData);
18892 #if VMA_RECORDING_ENABLED
18893 if(allocator->GetRecorder() != VMA_NULL)
18895 allocator->GetRecorder()->RecordSetAllocationUserData(
18896 allocator->GetCurrentFrameIndex(),
18907 VMA_ASSERT(allocator && pAllocation);
18909 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
18911 allocator->CreateLostAllocation(pAllocation);
18913 #if VMA_RECORDING_ENABLED
18914 if(allocator->GetRecorder() != VMA_NULL)
18916 allocator->GetRecorder()->RecordCreateLostAllocation(
18917 allocator->GetCurrentFrameIndex(),
18928 VMA_ASSERT(allocator && allocation && ppData);
18930 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18932 VkResult res = allocator->Map(allocation, ppData);
18934 #if VMA_RECORDING_ENABLED
18935 if(allocator->GetRecorder() != VMA_NULL)
18937 allocator->GetRecorder()->RecordMapMemory(
18938 allocator->GetCurrentFrameIndex(),
18950 VMA_ASSERT(allocator && allocation);
18952 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18954 #if VMA_RECORDING_ENABLED
18955 if(allocator->GetRecorder() != VMA_NULL)
18957 allocator->GetRecorder()->RecordUnmapMemory(
18958 allocator->GetCurrentFrameIndex(),
18963 allocator->Unmap(allocation);
18968 VMA_ASSERT(allocator && allocation);
18970 VMA_DEBUG_LOG(
"vmaFlushAllocation");
18972 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18974 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
18976 #if VMA_RECORDING_ENABLED
18977 if(allocator->GetRecorder() != VMA_NULL)
18979 allocator->GetRecorder()->RecordFlushAllocation(
18980 allocator->GetCurrentFrameIndex(),
18981 allocation, offset, size);
18990 VMA_ASSERT(allocator && allocation);
18992 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
18994 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18996 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
18998 #if VMA_RECORDING_ENABLED
18999 if(allocator->GetRecorder() != VMA_NULL)
19001 allocator->GetRecorder()->RecordInvalidateAllocation(
19002 allocator->GetCurrentFrameIndex(),
19003 allocation, offset, size);
19012 uint32_t allocationCount,
19014 const VkDeviceSize* offsets,
19015 const VkDeviceSize* sizes)
19017 VMA_ASSERT(allocator);
19019 if(allocationCount == 0)
19024 VMA_ASSERT(allocations);
19026 VMA_DEBUG_LOG(
"vmaFlushAllocations");
19028 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19030 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);
19032 #if VMA_RECORDING_ENABLED
19033 if(allocator->GetRecorder() != VMA_NULL)
19044 uint32_t allocationCount,
19046 const VkDeviceSize* offsets,
19047 const VkDeviceSize* sizes)
19049 VMA_ASSERT(allocator);
19051 if(allocationCount == 0)
19056 VMA_ASSERT(allocations);
19058 VMA_DEBUG_LOG(
"vmaInvalidateAllocations");
19060 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19062 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);
19064 #if VMA_RECORDING_ENABLED
19065 if(allocator->GetRecorder() != VMA_NULL)
19076 VMA_ASSERT(allocator);
19078 VMA_DEBUG_LOG(
"vmaCheckCorruption");
19080 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19082 return allocator->CheckCorruption(memoryTypeBits);
19088 size_t allocationCount,
19089 VkBool32* pAllocationsChanged,
19099 if(pDefragmentationInfo != VMA_NULL)
19113 if(res == VK_NOT_READY)
19126 VMA_ASSERT(allocator && pInfo && pContext);
19137 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
19139 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
19141 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19143 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
19145 #if VMA_RECORDING_ENABLED
19146 if(allocator->GetRecorder() != VMA_NULL)
19148 allocator->GetRecorder()->RecordDefragmentationBegin(
19149 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
19160 VMA_ASSERT(allocator);
19162 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
19164 if(context != VK_NULL_HANDLE)
19166 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19168 #if VMA_RECORDING_ENABLED
19169 if(allocator->GetRecorder() != VMA_NULL)
19171 allocator->GetRecorder()->RecordDefragmentationEnd(
19172 allocator->GetCurrentFrameIndex(), context);
19176 return allocator->DefragmentationEnd(context);
19190 VMA_ASSERT(allocator);
19193 VMA_DEBUG_LOG(
"vmaBeginDefragmentationPass");
19195 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19197 if(context == VK_NULL_HANDLE)
19203 return allocator->DefragmentationPassBegin(pInfo, context);
19209 VMA_ASSERT(allocator);
19211 VMA_DEBUG_LOG(
"vmaEndDefragmentationPass");
19212 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19214 if(context == VK_NULL_HANDLE)
19217 return allocator->DefragmentationPassEnd(context);
19225 VMA_ASSERT(allocator && allocation && buffer);
19227 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
19229 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19231 return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
19237 VkDeviceSize allocationLocalOffset,
19241 VMA_ASSERT(allocator && allocation && buffer);
19243 VMA_DEBUG_LOG(
"vmaBindBufferMemory2");
19245 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19247 return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
19255 VMA_ASSERT(allocator && allocation && image);
19257 VMA_DEBUG_LOG(
"vmaBindImageMemory");
19259 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19261 return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
19267 VkDeviceSize allocationLocalOffset,
19271 VMA_ASSERT(allocator && allocation && image);
19273 VMA_DEBUG_LOG(
"vmaBindImageMemory2");
19275 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19277 return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
19282 const VkBufferCreateInfo* pBufferCreateInfo,
19288 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
19290 if(pBufferCreateInfo->size == 0)
19292 return VK_ERROR_VALIDATION_FAILED_EXT;
19294 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
19295 !allocator->m_UseKhrBufferDeviceAddress)
19297 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
19298 return VK_ERROR_VALIDATION_FAILED_EXT;
19301 VMA_DEBUG_LOG(
"vmaCreateBuffer");
19303 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19305 *pBuffer = VK_NULL_HANDLE;
19306 *pAllocation = VK_NULL_HANDLE;
19309 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
19310 allocator->m_hDevice,
19312 allocator->GetAllocationCallbacks(),
19317 VkMemoryRequirements vkMemReq = {};
19318 bool requiresDedicatedAllocation =
false;
19319 bool prefersDedicatedAllocation =
false;
19320 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
19321 requiresDedicatedAllocation, prefersDedicatedAllocation);
19324 res = allocator->AllocateMemory(
19326 requiresDedicatedAllocation,
19327 prefersDedicatedAllocation,
19329 pBufferCreateInfo->usage,
19331 *pAllocationCreateInfo,
19332 VMA_SUBALLOCATION_TYPE_BUFFER,
19336 #if VMA_RECORDING_ENABLED
19337 if(allocator->GetRecorder() != VMA_NULL)
19339 allocator->GetRecorder()->RecordCreateBuffer(
19340 allocator->GetCurrentFrameIndex(),
19341 *pBufferCreateInfo,
19342 *pAllocationCreateInfo,
19352 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
19357 #if VMA_STATS_STRING_ENABLED
19358 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
19360 if(pAllocationInfo != VMA_NULL)
19362 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19367 allocator->FreeMemory(
19370 *pAllocation = VK_NULL_HANDLE;
19371 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19372 *pBuffer = VK_NULL_HANDLE;
19375 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19376 *pBuffer = VK_NULL_HANDLE;
19387 VMA_ASSERT(allocator);
19389 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19394 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
19396 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19398 #if VMA_RECORDING_ENABLED
19399 if(allocator->GetRecorder() != VMA_NULL)
19401 allocator->GetRecorder()->RecordDestroyBuffer(
19402 allocator->GetCurrentFrameIndex(),
19407 if(buffer != VK_NULL_HANDLE)
19409 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
19412 if(allocation != VK_NULL_HANDLE)
19414 allocator->FreeMemory(
19422 const VkImageCreateInfo* pImageCreateInfo,
19428 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
19430 if(pImageCreateInfo->extent.width == 0 ||
19431 pImageCreateInfo->extent.height == 0 ||
19432 pImageCreateInfo->extent.depth == 0 ||
19433 pImageCreateInfo->mipLevels == 0 ||
19434 pImageCreateInfo->arrayLayers == 0)
19436 return VK_ERROR_VALIDATION_FAILED_EXT;
19439 VMA_DEBUG_LOG(
"vmaCreateImage");
19441 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19443 *pImage = VK_NULL_HANDLE;
19444 *pAllocation = VK_NULL_HANDLE;
19447 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
19448 allocator->m_hDevice,
19450 allocator->GetAllocationCallbacks(),
19454 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
19455 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
19456 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
19459 VkMemoryRequirements vkMemReq = {};
19460 bool requiresDedicatedAllocation =
false;
19461 bool prefersDedicatedAllocation =
false;
19462 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
19463 requiresDedicatedAllocation, prefersDedicatedAllocation);
19465 res = allocator->AllocateMemory(
19467 requiresDedicatedAllocation,
19468 prefersDedicatedAllocation,
19472 *pAllocationCreateInfo,
19477 #if VMA_RECORDING_ENABLED
19478 if(allocator->GetRecorder() != VMA_NULL)
19480 allocator->GetRecorder()->RecordCreateImage(
19481 allocator->GetCurrentFrameIndex(),
19483 *pAllocationCreateInfo,
19493 res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
19498 #if VMA_STATS_STRING_ENABLED
19499 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
19501 if(pAllocationInfo != VMA_NULL)
19503 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19508 allocator->FreeMemory(
19511 *pAllocation = VK_NULL_HANDLE;
19512 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19513 *pImage = VK_NULL_HANDLE;
19516 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19517 *pImage = VK_NULL_HANDLE;
19528 VMA_ASSERT(allocator);
19530 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19535 VMA_DEBUG_LOG(
"vmaDestroyImage");
19537 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19539 #if VMA_RECORDING_ENABLED
19540 if(allocator->GetRecorder() != VMA_NULL)
19542 allocator->GetRecorder()->RecordDestroyImage(
19543 allocator->GetCurrentFrameIndex(),
19548 if(image != VK_NULL_HANDLE)
19550 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
19552 if(allocation != VK_NULL_HANDLE)
19554 allocator->FreeMemory(
Definition: vk_mem_alloc.h:2881
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2907
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2913
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2899
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2920
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2894
float priority
A floating-point value between 0 and 1, indicating the priority of the allocation relative to other m...
Definition: vk_mem_alloc.h:2927
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2889
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2883
Represents single memory allocation.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:3231
VkDeviceSize offset
Offset in VkDeviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:3255
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:3275
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:3236
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:3266
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:3280
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:3245
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:2415
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:2420
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2446
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:2471
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:2417
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null.
Definition: vk_mem_alloc.h:2477
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:2429
VkInstance instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:2489
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:2426
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:2484
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:2423
uint32_t vulkanApiVersion
Optional. The highest version of Vulkan that the application is designed to use.
Definition: vk_mem_alloc.h:2498
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:2432
Represents the main object of this library, once initialized.
Information about existing VmaAllocator object.
Definition: vk_mem_alloc.h:2513
VkDevice device
Handle to Vulkan device object.
Definition: vk_mem_alloc.h:2528
VkInstance instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:2518
VkPhysicalDevice physicalDevice
Handle to Vulkan physical device object.
Definition: vk_mem_alloc.h:2523
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
Definition: vk_mem_alloc.h:2619
VkDeviceSize blockBytes
Sum size of all VkDeviceMemory blocks allocated from particular heap, in bytes.
Definition: vk_mem_alloc.h:2622
VkDeviceSize allocationBytes
Sum size of all allocations created in particular heap, in bytes.
Definition: vk_mem_alloc.h:2633
VkDeviceSize usage
Estimated current memory usage of the program, in bytes.
Definition: vk_mem_alloc.h:2643
VkDeviceSize budget
Estimated amount of memory available to the program, in bytes.
Definition: vk_mem_alloc.h:2654
An opaque object that represents a started defragmentation process.
Parameters for defragmentation.
Definition: vk_mem_alloc.h:3630
const VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:3670
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:3636
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:3690
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:3685
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:3633
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:3651
uint32_t poolCount
Number of pools in the pPools array.
Definition: vk_mem_alloc.h:3654
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:3699
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:3680
const VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:3645
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:3675
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:3721
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:3731
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:3726
Parameters for incremental defragmentation steps.
Definition: vk_mem_alloc.h:3712
uint32_t moveCount
Definition: vk_mem_alloc.h:3713
VmaDefragmentationPassMoveInfo * pMoves
Definition: vk_mem_alloc.h:3714
Definition: vk_mem_alloc.h:3702
VkDeviceMemory memory
Definition: vk_mem_alloc.h:3704
VkDeviceSize offset
Definition: vk_mem_alloc.h:3705
VmaAllocation allocation
Definition: vk_mem_alloc.h:3703
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:3735
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:3743
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:3737
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:3739
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:3741
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:2224
void * pUserData
Optional, can be null.
Definition: vk_mem_alloc.h:2230
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:2226
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:2228
Describes parameters of a VmaPool to be created.
Definition: vk_mem_alloc.h:3049
float priority
A floating-point value between 0 and 1, indicating the priority of the allocations in this pool relat...
Definition: vk_mem_alloc.h:3097
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:3052
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:3055
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:3091
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:3064
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:3069
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:3077
Represents custom memory pool.
Describes parameters of an existing VmaPool.
Definition: vk_mem_alloc.h:3102
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:3105
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:3124
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:3121
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:3111
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:3108
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:3114
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:2400
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:2410
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:2402
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:2580
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:2591
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:2591
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:2590
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:2592
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:2584
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:2592
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:2588
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:2582
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:2591
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:2586
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:2592
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2597
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:2599
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:2598
VmaStatInfo total
Definition: vk_mem_alloc.h:2600
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:2354
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:2364
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:2369
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:2357
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:2361
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:2366
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:2358
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:2365
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:2362
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:2356
PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:2355
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:2368
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:2370
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:2363
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:2359
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:2360
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:2371
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:2367
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size, void *pUserData)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:2210
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
struct VmaAllocatorInfo VmaAllocatorInfo
Information about existing VmaAllocator object.
VkResult vmaEndDefragmentationPass(VmaAllocator allocator, VmaDefragmentationContext context)
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:2031
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
struct VmaStats VmaStats
General statistics from current state of Allocator.
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:3045
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:2386
@ VMA_RECORD_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2394
@ VMA_RECORD_FLUSH_AFTER_CALL_BIT
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:2392
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:2234
@ VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT
Definition: vk_mem_alloc.h:2309
@ VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:2239
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT
Definition: vk_mem_alloc.h:2291
@ VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT
Definition: vk_mem_alloc.h:2327
@ VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT
Definition: vk_mem_alloc.h:2279
@ VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:2264
@ VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2346
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT
Definition: vk_mem_alloc.h:2344
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2878
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
void vmaFreeMemory(VmaAllocator allocator, const VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:3620
@ VMA_DEFRAGMENTATION_FLAG_INCREMENTAL
Definition: vk_mem_alloc.h:3621
@ VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:3622
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
struct VmaDefragmentationPassInfo VmaDefragmentationPassInfo
Parameters for incremental defragmentation steps.
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size, void *pUserData)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:2203
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, const VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:3624
VkResult vmaBindBufferMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkBuffer buffer, const void *pNext)
Binds buffer to allocation with additional parameters.
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2989
@ VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:3024
@ VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:3043
@ VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:3035
@ VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:3007
@ VMA_POOL_CREATE_ALGORITHM_MASK
Definition: vk_mem_alloc.h:3039
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkResult vmaDefragment(VmaAllocator allocator, const VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
struct VmaBudget VmaBudget
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
VmaMemoryUsage
Definition: vk_mem_alloc.h:2702
@ VMA_MEMORY_USAGE_MAX_ENUM
Definition: vk_mem_alloc.h:2765
@ VMA_MEMORY_USAGE_CPU_ONLY
Definition: vk_mem_alloc.h:2733
@ VMA_MEMORY_USAGE_CPU_COPY
Definition: vk_mem_alloc.h:2755
@ VMA_MEMORY_USAGE_GPU_TO_CPU
Definition: vk_mem_alloc.h:2749
@ VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED
Definition: vk_mem_alloc.h:2763
@ VMA_MEMORY_USAGE_CPU_TO_GPU
Definition: vk_mem_alloc.h:2740
@ VMA_MEMORY_USAGE_GPU_ONLY
Definition: vk_mem_alloc.h:2723
@ VMA_MEMORY_USAGE_UNKNOWN
Definition: vk_mem_alloc.h:2706
VkResult vmaBindImageMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkImage image, const void *pNext)
Binds image to allocation with additional parameters.
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VkResult vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
VkResult vmaInvalidateAllocations(VmaAllocator allocator, uint32_t allocationCount, const VmaAllocation *allocations, const VkDeviceSize *offsets, const VkDeviceSize *sizes)
Invalidates memory of given set of allocations.
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
VkResult vmaBeginDefragmentationPass(VmaAllocator allocator, VmaDefragmentationContext context, VmaDefragmentationPassInfo *pInfo)
VkResult vmaFlushAllocations(VmaAllocator allocator, uint32_t allocationCount, const VmaAllocation *allocations, const VkDeviceSize *offsets, const VkDeviceSize *sizes)
Flushes memory of given set of allocations.
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:2348
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
struct VmaDefragmentationPassMoveInfo VmaDefragmentationPassMoveInfo
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2769
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT
Definition: vk_mem_alloc.h:2864
@ VMA_ALLOCATION_CREATE_MAPPED_BIT
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2800
@ VMA_ALLOCATION_CREATE_DONT_BIND_BIT
Definition: vk_mem_alloc.h:2837
@ VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT
Definition: vk_mem_alloc.h:2857
@ VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2776
@ VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT
Definition: vk_mem_alloc.h:2831
@ VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT
Definition: vk_mem_alloc.h:2813
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT
Definition: vk_mem_alloc.h:2867
@ VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT
Definition: vk_mem_alloc.h:2820
@ VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT
Definition: vk_mem_alloc.h:2846
@ VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2787
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT
Definition: vk_mem_alloc.h:2861
@ VMA_ALLOCATION_CREATE_STRATEGY_MASK
Definition: vk_mem_alloc.h:2871
@ VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT
Definition: vk_mem_alloc.h:2826
@ VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT
Definition: vk_mem_alloc.h:2841
@ VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT
Definition: vk_mem_alloc.h:2850
@ VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2876
void vmaSetPoolName(VmaAllocator allocator, VmaPool pool, const char *pName)
Sets name of a custom pool.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
void vmaGetBudget(VmaAllocator allocator, VmaBudget *pBudget)
Retrieves information about current memory budget for all memory heaps.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
void vmaGetPoolName(VmaAllocator allocator, VmaPool pool, const char **ppName)
Retrieves name of a custom pool.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:2396
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
void vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo *pAllocatorInfo)
Returns information about existing VmaAllocator object - handle to Vulkan device etc.