23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
2028 #ifndef VMA_RECORDING_ENABLED
2029 #define VMA_RECORDING_ENABLED 0
2032 #if !defined(NOMINMAX) && defined(VMA_IMPLEMENTATION)
2036 #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
2037 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
2038 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
2039 extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
2040 extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
2041 extern PFN_vkAllocateMemory vkAllocateMemory;
2042 extern PFN_vkFreeMemory vkFreeMemory;
2043 extern PFN_vkMapMemory vkMapMemory;
2044 extern PFN_vkUnmapMemory vkUnmapMemory;
2045 extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
2046 extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
2047 extern PFN_vkBindBufferMemory vkBindBufferMemory;
2048 extern PFN_vkBindImageMemory vkBindImageMemory;
2049 extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
2050 extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
2051 extern PFN_vkCreateBuffer vkCreateBuffer;
2052 extern PFN_vkDestroyBuffer vkDestroyBuffer;
2053 extern PFN_vkCreateImage vkCreateImage;
2054 extern PFN_vkDestroyImage vkDestroyImage;
2055 extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
2056 #if VMA_VULKAN_VERSION >= 1001000
2057 extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
2058 extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
2059 extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
2060 extern PFN_vkBindImageMemory2 vkBindImageMemory2;
2061 extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
2066 #include <vulkan/vulkan.h>
2072 #if !defined(VMA_VULKAN_VERSION)
2073 #if defined(VK_VERSION_1_2)
2074 #define VMA_VULKAN_VERSION 1002000
2075 #elif defined(VK_VERSION_1_1)
2076 #define VMA_VULKAN_VERSION 1001000
2078 #define VMA_VULKAN_VERSION 1000000
2082 #if !defined(VMA_DEDICATED_ALLOCATION)
2083 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
2084 #define VMA_DEDICATED_ALLOCATION 1
2086 #define VMA_DEDICATED_ALLOCATION 0
2090 #if !defined(VMA_BIND_MEMORY2)
2091 #if VK_KHR_bind_memory2
2092 #define VMA_BIND_MEMORY2 1
2094 #define VMA_BIND_MEMORY2 0
2098 #if !defined(VMA_MEMORY_BUDGET)
2099 #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
2100 #define VMA_MEMORY_BUDGET 1
2102 #define VMA_MEMORY_BUDGET 0
2107 #if !defined(VMA_BUFFER_DEVICE_ADDRESS)
2108 #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
2109 #define VMA_BUFFER_DEVICE_ADDRESS 1
2111 #define VMA_BUFFER_DEVICE_ADDRESS 0
2116 #if !defined(VMA_MEMORY_PRIORITY)
2117 #if VK_EXT_memory_priority
2118 #define VMA_MEMORY_PRIORITY 1
2120 #define VMA_MEMORY_PRIORITY 0
2125 #if !defined(VMA_EXTERNAL_MEMORY)
2126 #if VK_KHR_external_memory
2127 #define VMA_EXTERNAL_MEMORY 1
2129 #define VMA_EXTERNAL_MEMORY 0
2138 #ifndef VMA_CALL_PRE
2139 #define VMA_CALL_PRE
2141 #ifndef VMA_CALL_POST
2142 #define VMA_CALL_POST
2156 #ifndef VMA_LEN_IF_NOT_NULL
2157 #define VMA_LEN_IF_NOT_NULL(len)
2162 #ifndef VMA_NULLABLE
2164 #define VMA_NULLABLE _Nullable
2166 #define VMA_NULLABLE
2172 #ifndef VMA_NOT_NULL
2174 #define VMA_NOT_NULL _Nonnull
2176 #define VMA_NOT_NULL
2182 #ifndef VMA_NOT_NULL_NON_DISPATCHABLE
2183 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2184 #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
2186 #define VMA_NOT_NULL_NON_DISPATCHABLE
2190 #ifndef VMA_NULLABLE_NON_DISPATCHABLE
2191 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2192 #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
2194 #define VMA_NULLABLE_NON_DISPATCHABLE
2212 uint32_t memoryType,
2213 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2215 void* VMA_NULLABLE pUserData);
2219 uint32_t memoryType,
2220 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2222 void* VMA_NULLABLE pUserData);
2379 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
2380 PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
2381 PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
2383 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
2384 PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
2385 PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
2387 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
2388 PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
2478 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(
"VkPhysicalDeviceMemoryProperties::memoryHeapCount")
pHeapSizeLimit;
2506 #if VMA_EXTERNAL_MEMORY
2563 const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);
2571 const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);
2581 uint32_t memoryTypeIndex,
2582 VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);
2594 uint32_t frameIndex);
2690 #ifndef VMA_STATS_STRING_ENABLED
2691 #define VMA_STATS_STRING_ENABLED 1
2694 #if VMA_STATS_STRING_ENABLED
2701 char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
2702 VkBool32 detailedMap);
2706 char* VMA_NULLABLE pStatsString);
2967 uint32_t memoryTypeBits,
2969 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2985 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
2987 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
3003 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
3005 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
3172 VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);
3200 size_t* VMA_NULLABLE pLostAllocationCount);
3227 const char* VMA_NULLABLE * VMA_NOT_NULL ppName);
3237 const char* VMA_NULLABLE pName);
3331 const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
3357 const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
3359 size_t allocationCount,
3360 VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3361 VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);
3371 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3379 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3404 size_t allocationCount,
3405 const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);
3462 void* VMA_NULLABLE pUserData);
3519 void* VMA_NULLABLE * VMA_NOT_NULL ppData);
3557 VkDeviceSize offset,
3584 VkDeviceSize offset,
3603 uint32_t allocationCount,
3604 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3605 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3606 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3624 uint32_t allocationCount,
3625 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3626 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3627 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3740 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE
memory;
3878 const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3879 size_t allocationCount,
3880 VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
3899 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);
3914 VkDeviceSize allocationLocalOffset,
3915 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3916 const void* VMA_NULLABLE pNext);
3933 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);
3948 VkDeviceSize allocationLocalOffset,
3949 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3950 const void* VMA_NULLABLE pNext);
3984 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
3986 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
3998 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
4000 VkDeviceSize minAlignment,
4001 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
4018 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
4024 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
4026 VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
4043 VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
4053 #if defined(__cplusplus) && defined(__INTELLISENSE__)
4054 #define VMA_IMPLEMENTATION
4057 #ifdef VMA_IMPLEMENTATION
4058 #undef VMA_IMPLEMENTATION
4065 #if VMA_RECORDING_ENABLED
4068 #include <windows.h>
4088 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
4089 #define VMA_STATIC_VULKAN_FUNCTIONS 1
4098 #if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
4099 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
4100 #if defined(VK_NO_PROTOTYPES)
4101 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
4102 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
4115 #if VMA_USE_STL_CONTAINERS
4116 #define VMA_USE_STL_VECTOR 1
4117 #define VMA_USE_STL_UNORDERED_MAP 1
4118 #define VMA_USE_STL_LIST 1
4121 #ifndef VMA_USE_STL_SHARED_MUTEX
4123 #if __cplusplus >= 201703L
4124 #define VMA_USE_STL_SHARED_MUTEX 1
4128 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
4129 #define VMA_USE_STL_SHARED_MUTEX 1
4131 #define VMA_USE_STL_SHARED_MUTEX 0
4139 #if VMA_USE_STL_VECTOR
4143 #if VMA_USE_STL_UNORDERED_MAP
4144 #include <unordered_map>
4147 #if VMA_USE_STL_LIST
4156 #include <algorithm>
4161 #define VMA_NULL nullptr
4164 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
4166 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4169 if(alignment <
sizeof(
void*))
4171 alignment =
sizeof(
void*);
4174 return memalign(alignment, size);
4176 #elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
4179 #if defined(__APPLE__)
4180 #include <AvailabilityMacros.h>
4183 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4185 #if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0))
4186 #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0
4193 if (__builtin_available(macOS 10.15, iOS 13, *))
4194 return aligned_alloc(alignment, size);
4198 if(alignment <
sizeof(
void*))
4200 alignment =
sizeof(
void*);
4204 if(posix_memalign(&pointer, alignment, size) == 0)
4208 #elif defined(_WIN32)
4209 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4211 return _aligned_malloc(size, alignment);
4214 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4216 return aligned_alloc(alignment, size);
4221 static void vma_aligned_free(
void* ptr)
4226 static void vma_aligned_free(
void* VMA_NULLABLE ptr)
4240 #define VMA_ASSERT(expr)
4242 #define VMA_ASSERT(expr) assert(expr)
4248 #ifndef VMA_HEAVY_ASSERT
4250 #define VMA_HEAVY_ASSERT(expr)
4252 #define VMA_HEAVY_ASSERT(expr)
4256 #ifndef VMA_ALIGN_OF
4257 #define VMA_ALIGN_OF(type) (__alignof(type))
4260 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
4261 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
4264 #ifndef VMA_SYSTEM_ALIGNED_FREE
4266 #if defined(VMA_SYSTEM_FREE)
4267 #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr)
4269 #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr)
4274 #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
4278 #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
4282 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
4286 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
4289 #ifndef VMA_DEBUG_LOG
4290 #define VMA_DEBUG_LOG(format, ...)
4300 #if VMA_STATS_STRING_ENABLED
4301 static inline void VmaUint32ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint32_t num)
4303 snprintf(outStr, strLen,
"%u",
static_cast<unsigned int>(num));
4305 static inline void VmaUint64ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint64_t num)
4307 snprintf(outStr, strLen,
"%llu",
static_cast<unsigned long long>(num));
4309 static inline void VmaPtrToStr(
char* VMA_NOT_NULL outStr,
size_t strLen,
const void* ptr)
4311 snprintf(outStr, strLen,
"%p", ptr);
4319 void Lock() { m_Mutex.lock(); }
4320 void Unlock() { m_Mutex.unlock(); }
4321 bool TryLock() {
return m_Mutex.try_lock(); }
4325 #define VMA_MUTEX VmaMutex
4329 #ifndef VMA_RW_MUTEX
4330 #if VMA_USE_STL_SHARED_MUTEX
4332 #include <shared_mutex>
4336 void LockRead() { m_Mutex.lock_shared(); }
4337 void UnlockRead() { m_Mutex.unlock_shared(); }
4338 bool TryLockRead() {
return m_Mutex.try_lock_shared(); }
4339 void LockWrite() { m_Mutex.lock(); }
4340 void UnlockWrite() { m_Mutex.unlock(); }
4341 bool TryLockWrite() {
return m_Mutex.try_lock(); }
4343 std::shared_mutex m_Mutex;
4345 #define VMA_RW_MUTEX VmaRWMutex
4346 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
4352 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
4353 void LockRead() { AcquireSRWLockShared(&m_Lock); }
4354 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
4355 bool TryLockRead() {
return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
4356 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
4357 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
4358 bool TryLockWrite() {
return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
4362 #define VMA_RW_MUTEX VmaRWMutex
4368 void LockRead() { m_Mutex.Lock(); }
4369 void UnlockRead() { m_Mutex.Unlock(); }
4370 bool TryLockRead() {
return m_Mutex.TryLock(); }
4371 void LockWrite() { m_Mutex.Lock(); }
4372 void UnlockWrite() { m_Mutex.Unlock(); }
4373 bool TryLockWrite() {
return m_Mutex.TryLock(); }
4377 #define VMA_RW_MUTEX VmaRWMutex
4384 #ifndef VMA_ATOMIC_UINT32
4386 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
4389 #ifndef VMA_ATOMIC_UINT64
4391 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
4394 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
4399 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
4402 #ifndef VMA_MIN_ALIGNMENT
4407 #ifdef VMA_DEBUG_ALIGNMENT
4408 #define VMA_MIN_ALIGNMENT VMA_DEBUG_ALIGNMENT
4410 #define VMA_MIN_ALIGNMENT (1)
4414 #ifndef VMA_DEBUG_MARGIN
4419 #define VMA_DEBUG_MARGIN (0)
4422 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
4427 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
4430 #ifndef VMA_DEBUG_DETECT_CORRUPTION
4436 #define VMA_DEBUG_DETECT_CORRUPTION (0)
4439 #ifndef VMA_DEBUG_GLOBAL_MUTEX
4444 #define VMA_DEBUG_GLOBAL_MUTEX (0)
4447 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
4452 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
4455 #ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
4460 #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0)
4463 #ifndef VMA_SMALL_HEAP_MAX_SIZE
4465 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
4468 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
4470 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
4473 #ifndef VMA_CLASS_NO_COPY
4474 #define VMA_CLASS_NO_COPY(className) \
4476 className(const className&) = delete; \
4477 className& operator=(const className&) = delete;
4480 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
4483 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
4485 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
4486 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
4494 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
4495 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
4496 static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
4498 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
4500 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
4501 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count).
// Implemented with the classic SWAR "parallel bit counting" trick:
// each step sums counts in progressively wider groups.
static inline uint32_t VmaCountBitsSet(uint32_t v)
// Count bits within each 2-bit group.
uint32_t c = v - ((v >> 1) & 0x55555555);
// Sum adjacent 2-bit counts into 4-bit groups.
c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
// Fold 4-bit groups into bytes.
c = ((c >> 4) + c) & 0x0F0F0F0F;
// Fold bytes into 16-bit halves.
c = ((c >> 8) + c) & 0x00FF00FF;
// Final fold; the low 16 bits now hold the total popcount.
c = ((c >> 16) + c) & 0x0000FFFF;
// Tells whether x is a power of two. Note: also returns true for x == 0,
// which callers rely on being accepted.
template <typename T>
inline bool VmaIsPow2(T x)
{
    // A power of two has exactly one bit set, so clearing its lowest
    // set bit yields zero.
    return !(x & (x - 1));
}
// Rounds val up to the nearest multiple of alignment.
// alignment must be a power of two (verified in heavy-assert builds only).
template <typename T>
static inline T VmaAlignUp(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    const T mask = alignment - 1;
    return (val + mask) & ~mask;
}
// Rounds val down to the nearest multiple of alignment.
// alignment must be a power of two (verified in heavy-assert builds only).
template <typename T>
static inline T VmaAlignDown(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    const T mask = alignment - 1;
    return val & ~mask;
}
// Division of x by y with half-up rounding of the quotient
// (for the non-negative integer operands this is used with).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / (T)2;
    return (x + halfDivisor) / y;
}
4550 static inline uint32_t VmaNextPow2(uint32_t v)
4561 static inline uint64_t VmaNextPow2(uint64_t v)
4575 static inline uint32_t VmaPrevPow2(uint32_t v)
4585 static inline uint64_t VmaPrevPow2(uint64_t v)
4597 static inline bool VmaStrIsEmpty(
const char* pStr)
4599 return pStr == VMA_NULL || *pStr ==
'\0';
4602 #if VMA_STATS_STRING_ENABLED
4604 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto-style partition step for VmaQuickSort: uses the last element of the
// range (*centerValue) as the pivot and moves every element that compares
// less than the pivot in front of insertIndex.
template<
typename Iterator,
typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
// Pivot is the last element of [beg, end).
Iterator centerValue = end; --centerValue;
Iterator insertIndex = beg;
for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
if(cmp(*memTypeIndex, *centerValue))
// Avoid self-swap when the element is already in place.
if(insertIndex != memTypeIndex)
VMA_SWAP(*memTypeIndex, *insertIndex);
// Move the pivot into its final sorted position.
if(insertIndex != centerValue)
VMA_SWAP(*insertIndex, *centerValue);
// Recursive quicksort over [beg, end) using VmaQuickSortPartition.
// Serves as the VMA_SORT implementation when std::sort is not used.
template<
typename Iterator,
typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
// Partition, then sort both halves around the pivot's final position.
Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
VmaQuickSort<Iterator, Compare>(beg, it, cmp);
VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
4658 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
4669 static inline bool VmaBlocksOnSamePage(
4670 VkDeviceSize resourceAOffset,
4671 VkDeviceSize resourceASize,
4672 VkDeviceSize resourceBOffset,
4673 VkDeviceSize pageSize)
4675 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
4676 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
4677 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
4678 VkDeviceSize resourceBStart = resourceBOffset;
4679 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
4680 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation. The relative ordering of the
// values matters: VmaIsBufferImageGranularityConflict normalizes its two
// arguments by this order before comparing them.
enum VmaSuballocationType
// Unused region.
VMA_SUBALLOCATION_TYPE_FREE = 0,
// Contents not known (no buffer/image information available).
VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
VMA_SUBALLOCATION_TYPE_BUFFER = 2,
// Image whose tiling is not known.
VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
// Forces the enum to 32 bits, matching Vulkan's enum convention.
VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
// Decides whether two neighboring suballocations of the given types must be
// separated by bufferImageGranularity (linear vs. optimal-tiling resources
// must not share a granularity page).
static inline bool VmaIsBufferImageGranularityConflict(
VmaSuballocationType suballocType1,
VmaSuballocationType suballocType2)
// Normalize so that suballocType1 <= suballocType2; the relation is symmetric.
if(suballocType1 > suballocType2)
VMA_SWAP(suballocType1, suballocType2);
switch(suballocType1)
case VMA_SUBALLOCATION_TYPE_FREE:
case VMA_SUBALLOCATION_TYPE_UNKNOWN:
case VMA_SUBALLOCATION_TYPE_BUFFER:
// A buffer conflicts with images of unknown or optimal tiling.
suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
// Unknown tiling must be treated conservatively against all image kinds.
suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
4735 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
4737 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4738 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
4739 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4740 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
4742 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
4749 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
4751 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4752 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
4753 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4754 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
4756 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
// Fills outBufCreateInfo with the VkBufferCreateInfo used for the temporary
// staging buffers of GPU defragmentation: transfer src/dst usage and the
// default large heap block size.
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
// Zero the whole struct first so fields not set below (pNext, flags, ...) are valid.
memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
4780 VMA_CLASS_NO_COPY(VmaMutexLock)
4782 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
4783 m_pMutex(useMutex ? &mutex : VMA_NULL)
4784 {
if(m_pMutex) { m_pMutex->Lock(); } }
4786 {
if(m_pMutex) { m_pMutex->Unlock(); } }
4788 VMA_MUTEX* m_pMutex;
4792 struct VmaMutexLockRead
4794 VMA_CLASS_NO_COPY(VmaMutexLockRead)
4796 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
4797 m_pMutex(useMutex ? &mutex : VMA_NULL)
4798 {
if(m_pMutex) { m_pMutex->LockRead(); } }
4799 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
4801 VMA_RW_MUTEX* m_pMutex;
4805 struct VmaMutexLockWrite
4807 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
4809 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
4810 m_pMutex(useMutex ? &mutex : VMA_NULL)
4811 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
4812 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
4814 VMA_RW_MUTEX* m_pMutex;
4817 #if VMA_DEBUG_GLOBAL_MUTEX
4818 static VMA_MUTEX gDebugGlobalMutex;
4819 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
4821 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
4825 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over a sorted range [beg, end): returns an iterator to the
// first element NOT less than key according to cmp (std::lower_bound
// semantics), or end when no such element exists.
template <
typename CmpLess,
typename IterT,
typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key,
const CmpLess& cmp)
// Index-based halving; "down" chases the first not-less position.
size_t down = 0, up = (end - beg);
const size_t mid = down + (up - down) / 2;
if(cmp(*(beg+mid), key))
4855 template<
typename CmpLess,
typename IterT,
typename KeyT>
4856 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
4858 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4859 beg, end, value, cmp);
4861 (!cmp(*it, value) && !cmp(value, *it)))
4873 template<
typename T>
4874 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
4876 for(uint32_t i = 0; i < count; ++i)
4878 const T iPtr = arr[i];
4879 if(iPtr == VMA_NULL)
4883 for(uint32_t j = i + 1; j < count; ++j)
// Prepends newStruct at the head of the Vulkan pNext chain hanging off
// mainStruct, preserving whatever chain was already attached.
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    // Splice: the new node inherits the current chain, then becomes the head.
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
// Allocates size bytes aligned to alignment. Prefers the user-supplied
// pfnAllocation callback when one was provided; otherwise falls back to the
// platform's aligned allocator. Asserts (debug builds) on allocation failure.
static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
void* result = VMA_NULL;
// Route through the caller's custom allocator when available.
if((pAllocationCallbacks != VMA_NULL) &&
(pAllocationCallbacks->pfnAllocation != VMA_NULL))
result = (*pAllocationCallbacks->pfnAllocation)(
pAllocationCallbacks->pUserData,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
// Fallback: system aligned malloc.
result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
VMA_ASSERT(result != VMA_NULL &&
"CPU memory allocation failed.");
// Frees memory previously obtained from VmaMalloc, routing through the user's
// pfnFree callback when one was provided, otherwise the system aligned free.
static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
if((pAllocationCallbacks != VMA_NULL) &&
(pAllocationCallbacks->pfnFree != VMA_NULL))
(*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
VMA_SYSTEM_ALIGNED_FREE(ptr);
4937 template<
typename T>
4938 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
4940 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
4943 template<
typename T>
4944 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
4946 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
4949 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
4951 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
4953 template<
typename T>
4954 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
4957 VmaFree(pAllocationCallbacks, ptr);
4960 template<
typename T>
4961 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
4965 for(
size_t i = count; i--; )
4969 VmaFree(pAllocationCallbacks, ptr);
// Duplicates srcStr (including the terminating NUL) into memory obtained
// through the given allocation callbacks. Pair with VmaFreeString to release.
static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr)
if(srcStr != VMA_NULL)
const size_t len = strlen(srcStr);
// len + 1 to copy the NUL terminator as well.
char*
const result = vma_new_array(allocs,
char, len + 1);
memcpy(result, srcStr, len + 1);
// Releases a string previously created with VmaCreateStringCopy.
static void VmaFreeString(
const VkAllocationCallbacks* allocs,
char* str)
// Recompute length so exactly the len + 1 array that was allocated is freed.
const size_t len = strlen(str);
vma_delete_array(allocs, str, len + 1);
4998 template<
typename T>
4999 class VmaStlAllocator
5002 const VkAllocationCallbacks*
const m_pCallbacks;
5003 typedef T value_type;
5005 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
5006 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
5008 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
5009 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
5011 template<
typename U>
5012 bool operator==(
const VmaStlAllocator<U>& rhs)
const
5014 return m_pCallbacks == rhs.m_pCallbacks;
5016 template<
typename U>
5017 bool operator!=(
const VmaStlAllocator<U>& rhs)
const
5019 return m_pCallbacks != rhs.m_pCallbacks;
5022 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
5023 VmaStlAllocator(
const VmaStlAllocator&) =
default;
5026 #if VMA_USE_STL_VECTOR
5028 #define VmaVector std::vector
// Inserts item at position index of vec, shifting later elements one slot
// towards the end (std::vector flavor, used when VMA_USE_STL_VECTOR is on).
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.cbegin() + index, item);
}
// Removes the element at position index from vec, shifting later elements one
// slot towards the front (std::vector flavor, used with VMA_USE_STL_VECTOR).
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.cbegin() + index);
}
5047 template<
typename T,
typename AllocatorT>
5051 typedef T value_type;
5053 VmaVector(
const AllocatorT& allocator) :
5054 m_Allocator(allocator),
5061 VmaVector(
size_t count,
const AllocatorT& allocator) :
5062 m_Allocator(allocator),
5063 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
5071 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator)
5072 : VmaVector(count, allocator) {}
5074 VmaVector(
const VmaVector<T, AllocatorT>& src) :
5075 m_Allocator(src.m_Allocator),
5076 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
5077 m_Count(src.m_Count),
5078 m_Capacity(src.m_Count)
5082 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
5088 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5091 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
5095 resize(rhs.m_Count);
5098 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
5104 bool empty()
const {
return m_Count == 0; }
5105 size_t size()
const {
return m_Count; }
5106 T* data() {
return m_pArray; }
5107 const T* data()
const {
return m_pArray; }
5109 T& operator[](
size_t index)
5111 VMA_HEAVY_ASSERT(index < m_Count);
5112 return m_pArray[index];
5114 const T& operator[](
size_t index)
const
5116 VMA_HEAVY_ASSERT(index < m_Count);
5117 return m_pArray[index];
5122 VMA_HEAVY_ASSERT(m_Count > 0);
5125 const T& front()
const
5127 VMA_HEAVY_ASSERT(m_Count > 0);
5132 VMA_HEAVY_ASSERT(m_Count > 0);
5133 return m_pArray[m_Count - 1];
5135 const T& back()
const
5137 VMA_HEAVY_ASSERT(m_Count > 0);
5138 return m_pArray[m_Count - 1];
5141 void reserve(
size_t newCapacity,
bool freeMemory =
false)
5143 newCapacity = VMA_MAX(newCapacity, m_Count);
5145 if((newCapacity < m_Capacity) && !freeMemory)
5147 newCapacity = m_Capacity;
5150 if(newCapacity != m_Capacity)
5152 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
5155 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5157 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5158 m_Capacity = newCapacity;
5159 m_pArray = newArray;
5163 void resize(
size_t newCount)
5165 size_t newCapacity = m_Capacity;
5166 if(newCount > m_Capacity)
5168 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
5171 if(newCapacity != m_Capacity)
5173 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
5174 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
5175 if(elementsToCopy != 0)
5177 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
5179 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5180 m_Capacity = newCapacity;
5181 m_pArray = newArray;
5192 void shrink_to_fit()
5194 if(m_Capacity > m_Count)
5196 T* newArray = VMA_NULL;
5199 newArray = VmaAllocateArray<T>(m_Allocator.m_pCallbacks, m_Count);
5200 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5202 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5203 m_Capacity = m_Count;
5204 m_pArray = newArray;
5208 void insert(
size_t index,
const T& src)
5210 VMA_HEAVY_ASSERT(index <= m_Count);
5211 const size_t oldCount = size();
5212 resize(oldCount + 1);
5213 if(index < oldCount)
5215 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
5217 m_pArray[index] = src;
5220 void remove(
size_t index)
5222 VMA_HEAVY_ASSERT(index < m_Count);
5223 const size_t oldCount = size();
5224 if(index < oldCount - 1)
5226 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
5228 resize(oldCount - 1);
5231 void push_back(
const T& src)
5233 const size_t newIndex = size();
5234 resize(newIndex + 1);
5235 m_pArray[newIndex] = src;
5240 VMA_HEAVY_ASSERT(m_Count > 0);
5244 void push_front(
const T& src)
5251 VMA_HEAVY_ASSERT(m_Count > 0);
5255 typedef T* iterator;
5256 typedef const T* const_iterator;
5258 iterator begin() {
return m_pArray; }
5259 iterator end() {
return m_pArray + m_Count; }
5260 const_iterator cbegin()
const {
return m_pArray; }
5261 const_iterator cend()
const {
return m_pArray + m_Count; }
5262 const_iterator begin()
const {
return cbegin(); }
5263 const_iterator end()
const {
return cend(); }
5266 AllocatorT m_Allocator;
5272 template<
typename T,
typename allocatorT>
5273 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
5275 vec.insert(index, item);
5278 template<
typename T,
typename allocatorT>
5279 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
5286 template<
typename CmpLess,
typename VectorT>
5287 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
5289 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
5291 vector.data() + vector.size(),
5293 CmpLess()) - vector.data();
5294 VmaVectorInsert(vector, indexToInsert, value);
5295 return indexToInsert;
// Removes the first element equivalent to value (per CmpLess) from a sorted
// vector. Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalence test: neither strictly less in either direction.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
5327 template<
typename T,
typename AllocatorT,
size_t N>
5328 class VmaSmallVector
5331 typedef T value_type;
5333 VmaSmallVector(
const AllocatorT& allocator) :
5335 m_DynamicArray(allocator)
5338 VmaSmallVector(
size_t count,
const AllocatorT& allocator) :
5340 m_DynamicArray(count > N ? count : 0, allocator)
5343 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5344 VmaSmallVector(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) =
delete;
5345 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5346 VmaSmallVector<T, AllocatorT, N>& operator=(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) =
delete;
5348 bool empty()
const {
return m_Count == 0; }
5349 size_t size()
const {
return m_Count; }
5350 T* data() {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5351 const T* data()
const {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5353 T& operator[](
size_t index)
5355 VMA_HEAVY_ASSERT(index < m_Count);
5356 return data()[index];
5358 const T& operator[](
size_t index)
const
5360 VMA_HEAVY_ASSERT(index < m_Count);
5361 return data()[index];
5366 VMA_HEAVY_ASSERT(m_Count > 0);
5369 const T& front()
const
5371 VMA_HEAVY_ASSERT(m_Count > 0);
5376 VMA_HEAVY_ASSERT(m_Count > 0);
5377 return data()[m_Count - 1];
5379 const T& back()
const
5381 VMA_HEAVY_ASSERT(m_Count > 0);
5382 return data()[m_Count - 1];
5385 void resize(
size_t newCount,
bool freeMemory =
false)
5387 if(newCount > N && m_Count > N)
5390 m_DynamicArray.resize(newCount);
5393 m_DynamicArray.shrink_to_fit();
5396 else if(newCount > N && m_Count <= N)
5399 m_DynamicArray.resize(newCount);
5402 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count *
sizeof(T));
5405 else if(newCount <= N && m_Count > N)
5410 memcpy(m_StaticArray, m_DynamicArray.data(), newCount *
sizeof(T));
5412 m_DynamicArray.resize(0);
5415 m_DynamicArray.shrink_to_fit();
5425 void clear(
bool freeMemory =
false)
5427 m_DynamicArray.clear();
5430 m_DynamicArray.shrink_to_fit();
5435 void insert(
size_t index,
const T& src)
5437 VMA_HEAVY_ASSERT(index <= m_Count);
5438 const size_t oldCount = size();
5439 resize(oldCount + 1);
5440 T*
const dataPtr = data();
5441 if(index < oldCount)
5444 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) *
sizeof(T));
5446 dataPtr[index] = src;
5449 void remove(
size_t index)
5451 VMA_HEAVY_ASSERT(index < m_Count);
5452 const size_t oldCount = size();
5453 if(index < oldCount - 1)
5456 T*
const dataPtr = data();
5457 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) *
sizeof(T));
5459 resize(oldCount - 1);
5462 void push_back(
const T& src)
5464 const size_t newIndex = size();
5465 resize(newIndex + 1);
5466 data()[newIndex] = src;
5471 VMA_HEAVY_ASSERT(m_Count > 0);
5475 void push_front(
const T& src)
5482 VMA_HEAVY_ASSERT(m_Count > 0);
5486 typedef T* iterator;
5488 iterator begin() {
return data(); }
5489 iterator end() {
return data() + m_Count; }
5494 VmaVector<T, AllocatorT> m_DynamicArray;
5505 template<
typename T>
5506 class VmaPoolAllocator
5508 VMA_CLASS_NO_COPY(VmaPoolAllocator)
5510 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
5511 ~VmaPoolAllocator();
5512 template<
typename... Types> T* Alloc(Types... args);
5518 uint32_t NextFreeIndex;
5519 alignas(T)
char Value[
sizeof(T)];
5526 uint32_t FirstFreeIndex;
5529 const VkAllocationCallbacks* m_pAllocationCallbacks;
5530 const uint32_t m_FirstBlockCapacity;
5531 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
5533 ItemBlock& CreateNewBlock();
5536 template<
typename T>
5537 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
5538 m_pAllocationCallbacks(pAllocationCallbacks),
5539 m_FirstBlockCapacity(firstBlockCapacity),
5540 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
5542 VMA_ASSERT(m_FirstBlockCapacity > 1);
5545 template<
typename T>
5546 VmaPoolAllocator<T>::~VmaPoolAllocator()
5548 for(
size_t i = m_ItemBlocks.size(); i--; )
5549 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
5550 m_ItemBlocks.clear();
5553 template<
typename T>
5554 template<
typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
5556 for(
size_t i = m_ItemBlocks.size(); i--; )
5558 ItemBlock& block = m_ItemBlocks[i];
5560 if(block.FirstFreeIndex != UINT32_MAX)
5562 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
5563 block.FirstFreeIndex = pItem->NextFreeIndex;
5564 T* result = (T*)&pItem->Value;
5565 new(result)T(std::forward<Types>(args)...);
5571 ItemBlock& newBlock = CreateNewBlock();
5572 Item*
const pItem = &newBlock.pItems[0];
5573 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
5574 T* result = (T*)&pItem->Value;
5575 new(result)T(std::forward<Types>(args)...);
5579 template<
typename T>
5580 void VmaPoolAllocator<T>::Free(T* ptr)
5583 for(
size_t i = m_ItemBlocks.size(); i--; )
5585 ItemBlock& block = m_ItemBlocks[i];
5589 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
5592 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
5595 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
5596 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
5597 block.FirstFreeIndex = index;
5601 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
5604 template<
typename T>
5605 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
5607 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
5608 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
5610 const ItemBlock newBlock = {
5611 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
5615 m_ItemBlocks.push_back(newBlock);
5618 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
5619 newBlock.pItems[i].NextFreeIndex = i + 1;
5620 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
5621 return m_ItemBlocks.back();
5627 #if VMA_USE_STL_LIST
5629 #define VmaList std::list
// Node of VmaRawList: doubly-linked, holds the value inline.
// NOTE: struct body was elided in extraction; reconstructed from the usage in
// VmaRawList below (pPrev/pNext/Value).
template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};
5642 template<
typename T>
5645 VMA_CLASS_NO_COPY(VmaRawList)
5647 typedef VmaListItem<T> ItemType;
5649 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
5653 size_t GetCount()
const {
return m_Count; }
5654 bool IsEmpty()
const {
return m_Count == 0; }
5656 ItemType* Front() {
return m_pFront; }
5657 const ItemType* Front()
const {
return m_pFront; }
5658 ItemType* Back() {
return m_pBack; }
5659 const ItemType* Back()
const {
return m_pBack; }
5661 ItemType* PushBack();
5662 ItemType* PushFront();
5663 ItemType* PushBack(
const T& value);
5664 ItemType* PushFront(
const T& value);
5669 ItemType* InsertBefore(ItemType* pItem);
5671 ItemType* InsertAfter(ItemType* pItem);
5673 ItemType* InsertBefore(ItemType* pItem,
const T& value);
5674 ItemType* InsertAfter(ItemType* pItem,
const T& value);
5676 void Remove(ItemType* pItem);
5679 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
5680 VmaPoolAllocator<ItemType> m_ItemAllocator;
5686 template<
typename T>
5687 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
5688 m_pAllocationCallbacks(pAllocationCallbacks),
5689 m_ItemAllocator(pAllocationCallbacks, 128),
5696 template<
typename T>
5697 VmaRawList<T>::~VmaRawList()
5703 template<
typename T>
5704 void VmaRawList<T>::Clear()
5706 if(IsEmpty() ==
false)
5708 ItemType* pItem = m_pBack;
5709 while(pItem != VMA_NULL)
5711 ItemType*
const pPrevItem = pItem->pPrev;
5712 m_ItemAllocator.Free(pItem);
5715 m_pFront = VMA_NULL;
5721 template<
typename T>
5722 VmaListItem<T>* VmaRawList<T>::PushBack()
5724 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5725 pNewItem->pNext = VMA_NULL;
5728 pNewItem->pPrev = VMA_NULL;
5729 m_pFront = pNewItem;
5735 pNewItem->pPrev = m_pBack;
5736 m_pBack->pNext = pNewItem;
5743 template<
typename T>
5744 VmaListItem<T>* VmaRawList<T>::PushFront()
5746 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5747 pNewItem->pPrev = VMA_NULL;
5750 pNewItem->pNext = VMA_NULL;
5751 m_pFront = pNewItem;
5757 pNewItem->pNext = m_pFront;
5758 m_pFront->pPrev = pNewItem;
5759 m_pFront = pNewItem;
5765 template<
typename T>
5766 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
5768 ItemType*
const pNewItem = PushBack();
5769 pNewItem->Value = value;
5773 template<
typename T>
5774 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
5776 ItemType*
const pNewItem = PushFront();
5777 pNewItem->Value = value;
5781 template<
typename T>
5782 void VmaRawList<T>::PopBack()
5784 VMA_HEAVY_ASSERT(m_Count > 0);
5785 ItemType*
const pBackItem = m_pBack;
5786 ItemType*
const pPrevItem = pBackItem->pPrev;
5787 if(pPrevItem != VMA_NULL)
5789 pPrevItem->pNext = VMA_NULL;
5791 m_pBack = pPrevItem;
5792 m_ItemAllocator.Free(pBackItem);
5796 template<
typename T>
5797 void VmaRawList<T>::PopFront()
5799 VMA_HEAVY_ASSERT(m_Count > 0);
5800 ItemType*
const pFrontItem = m_pFront;
5801 ItemType*
const pNextItem = pFrontItem->pNext;
5802 if(pNextItem != VMA_NULL)
5804 pNextItem->pPrev = VMA_NULL;
5806 m_pFront = pNextItem;
5807 m_ItemAllocator.Free(pFrontItem);
5811 template<
typename T>
5812 void VmaRawList<T>::Remove(ItemType* pItem)
5814 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
5815 VMA_HEAVY_ASSERT(m_Count > 0);
5817 if(pItem->pPrev != VMA_NULL)
5819 pItem->pPrev->pNext = pItem->pNext;
5823 VMA_HEAVY_ASSERT(m_pFront == pItem);
5824 m_pFront = pItem->pNext;
5827 if(pItem->pNext != VMA_NULL)
5829 pItem->pNext->pPrev = pItem->pPrev;
5833 VMA_HEAVY_ASSERT(m_pBack == pItem);
5834 m_pBack = pItem->pPrev;
5837 m_ItemAllocator.Free(pItem);
5841 template<
typename T>
5842 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
5844 if(pItem != VMA_NULL)
5846 ItemType*
const prevItem = pItem->pPrev;
5847 ItemType*
const newItem = m_ItemAllocator.Alloc();
5848 newItem->pPrev = prevItem;
5849 newItem->pNext = pItem;
5850 pItem->pPrev = newItem;
5851 if(prevItem != VMA_NULL)
5853 prevItem->pNext = newItem;
5857 VMA_HEAVY_ASSERT(m_pFront == pItem);
5867 template<
typename T>
5868 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
5870 if(pItem != VMA_NULL)
5872 ItemType*
const nextItem = pItem->pNext;
5873 ItemType*
const newItem = m_ItemAllocator.Alloc();
5874 newItem->pNext = nextItem;
5875 newItem->pPrev = pItem;
5876 pItem->pNext = newItem;
5877 if(nextItem != VMA_NULL)
5879 nextItem->pPrev = newItem;
5883 VMA_HEAVY_ASSERT(m_pBack == pItem);
5893 template<
typename T>
5894 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
5896 ItemType*
const newItem = InsertBefore(pItem);
5897 newItem->Value = value;
5901 template<
typename T>
5902 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
5904 ItemType*
const newItem = InsertAfter(pItem);
5905 newItem->Value = value;
5909 template<
typename T,
typename AllocatorT>
5912 VMA_CLASS_NO_COPY(VmaList)
5923 T& operator*()
const
5925 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5926 return m_pItem->Value;
5928 T* operator->()
const
5930 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5931 return &m_pItem->Value;
5934 iterator& operator++()
5936 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5937 m_pItem = m_pItem->pNext;
5940 iterator& operator--()
5942 if(m_pItem != VMA_NULL)
5944 m_pItem = m_pItem->pPrev;
5948 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5949 m_pItem = m_pList->Back();
5954 iterator operator++(
int)
5956 iterator result = *
this;
5960 iterator operator--(
int)
5962 iterator result = *
this;
5967 bool operator==(
const iterator& rhs)
const
5969 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5970 return m_pItem == rhs.m_pItem;
5972 bool operator!=(
const iterator& rhs)
const
5974 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5975 return m_pItem != rhs.m_pItem;
5979 VmaRawList<T>* m_pList;
5980 VmaListItem<T>* m_pItem;
5982 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
5988 friend class VmaList<T, AllocatorT>;
5991 class const_iterator
6000 const_iterator(
const iterator& src) :
6001 m_pList(src.m_pList),
6002 m_pItem(src.m_pItem)
6006 const T& operator*()
const
6008 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
6009 return m_pItem->Value;
6011 const T* operator->()
const
6013 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
6014 return &m_pItem->Value;
6017 const_iterator& operator++()
6019 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
6020 m_pItem = m_pItem->pNext;
6023 const_iterator& operator--()
6025 if(m_pItem != VMA_NULL)
6027 m_pItem = m_pItem->pPrev;
6031 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
6032 m_pItem = m_pList->Back();
6037 const_iterator operator++(
int)
6039 const_iterator result = *
this;
6043 const_iterator operator--(
int)
6045 const_iterator result = *
this;
6050 bool operator==(
const const_iterator& rhs)
const
6052 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
6053 return m_pItem == rhs.m_pItem;
6055 bool operator!=(
const const_iterator& rhs)
const
6057 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
6058 return m_pItem != rhs.m_pItem;
6062 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
6068 const VmaRawList<T>* m_pList;
6069 const VmaListItem<T>* m_pItem;
6071 friend class VmaList<T, AllocatorT>;
6074 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
6076 bool empty()
const {
return m_RawList.IsEmpty(); }
6077 size_t size()
const {
return m_RawList.GetCount(); }
6079 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
6080 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
6082 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
6083 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
6085 const_iterator begin()
const {
return cbegin(); }
6086 const_iterator end()
const {
return cend(); }
6088 void clear() { m_RawList.Clear(); }
6089 void push_back(
const T& value) { m_RawList.PushBack(value); }
6090 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
6091 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
6094 VmaRawList<T> m_RawList;
6113 template<
typename ItemTypeTraits>
6114 class VmaIntrusiveLinkedList
6117 typedef typename ItemTypeTraits::ItemType ItemType;
6118 static ItemType* GetPrev(
const ItemType* item) {
return ItemTypeTraits::GetPrev(item); }
6119 static ItemType* GetNext(
const ItemType* item) {
return ItemTypeTraits::GetNext(item); }
6121 VmaIntrusiveLinkedList() { }
6122 VmaIntrusiveLinkedList(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
6123 VmaIntrusiveLinkedList(VmaIntrusiveLinkedList<ItemTypeTraits>&& src) :
6124 m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count)
6126 src.m_Front = src.m_Back = VMA_NULL;
6129 ~VmaIntrusiveLinkedList()
6131 VMA_HEAVY_ASSERT(IsEmpty());
6133 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
6134 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(VmaIntrusiveLinkedList<ItemTypeTraits>&& src)
6138 VMA_HEAVY_ASSERT(IsEmpty());
6139 m_Front = src.m_Front;
6140 m_Back = src.m_Back;
6141 m_Count = src.m_Count;
6142 src.m_Front = src.m_Back = VMA_NULL;
6151 ItemType* item = m_Back;
6152 while(item != VMA_NULL)
6154 ItemType*
const prevItem = ItemTypeTraits::AccessPrev(item);
6155 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
6156 ItemTypeTraits::AccessNext(item) = VMA_NULL;
6164 size_t GetCount()
const {
return m_Count; }
6165 bool IsEmpty()
const {
return m_Count == 0; }
6166 ItemType* Front() {
return m_Front; }
6167 const ItemType* Front()
const {
return m_Front; }
6168 ItemType* Back() {
return m_Back; }
6169 const ItemType* Back()
const {
return m_Back; }
6170 void PushBack(ItemType* item)
6172 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
6181 ItemTypeTraits::AccessPrev(item) = m_Back;
6182 ItemTypeTraits::AccessNext(m_Back) = item;
6187 void PushFront(ItemType* item)
6189 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
6198 ItemTypeTraits::AccessNext(item) = m_Front;
6199 ItemTypeTraits::AccessPrev(m_Front) = item;
6206 VMA_HEAVY_ASSERT(m_Count > 0);
6207 ItemType*
const backItem = m_Back;
6208 ItemType*
const prevItem = ItemTypeTraits::GetPrev(backItem);
6209 if(prevItem != VMA_NULL)
6211 ItemTypeTraits::AccessNext(prevItem) = VMA_NULL;
6215 ItemTypeTraits::AccessPrev(backItem) = VMA_NULL;
6216 ItemTypeTraits::AccessNext(backItem) = VMA_NULL;
6219 ItemType* PopFront()
6221 VMA_HEAVY_ASSERT(m_Count > 0);
6222 ItemType*
const frontItem = m_Front;
6223 ItemType*
const nextItem = ItemTypeTraits::GetNext(frontItem);
6224 if(nextItem != VMA_NULL)
6226 ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL;
6230 ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL;
6231 ItemTypeTraits::AccessNext(frontItem) = VMA_NULL;
6236 void InsertBefore(ItemType* existingItem, ItemType* newItem)
6238 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
6239 if(existingItem != VMA_NULL)
6241 ItemType*
const prevItem = ItemTypeTraits::GetPrev(existingItem);
6242 ItemTypeTraits::AccessPrev(newItem) = prevItem;
6243 ItemTypeTraits::AccessNext(newItem) = existingItem;
6244 ItemTypeTraits::AccessPrev(existingItem) = newItem;
6245 if(prevItem != VMA_NULL)
6247 ItemTypeTraits::AccessNext(prevItem) = newItem;
6251 VMA_HEAVY_ASSERT(m_Front == existingItem);
6260 void InsertAfter(ItemType* existingItem, ItemType* newItem)
6262 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
6263 if(existingItem != VMA_NULL)
6265 ItemType*
const nextItem = ItemTypeTraits::GetNext(existingItem);
6266 ItemTypeTraits::AccessNext(newItem) = nextItem;
6267 ItemTypeTraits::AccessPrev(newItem) = existingItem;
6268 ItemTypeTraits::AccessNext(existingItem) = newItem;
6269 if(nextItem != VMA_NULL)
6271 ItemTypeTraits::AccessPrev(nextItem) = newItem;
6275 VMA_HEAVY_ASSERT(m_Back == existingItem);
6281 return PushFront(newItem);
6283 void Remove(ItemType* item)
6285 VMA_HEAVY_ASSERT(item != VMA_NULL && m_Count > 0);
6286 if(ItemTypeTraits::GetPrev(item) != VMA_NULL)
6288 ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item);
6292 VMA_HEAVY_ASSERT(m_Front == item);
6293 m_Front = ItemTypeTraits::GetNext(item);
6296 if(ItemTypeTraits::GetNext(item) != VMA_NULL)
6298 ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item);
6302 VMA_HEAVY_ASSERT(m_Back == item);
6303 m_Back = ItemTypeTraits::GetPrev(item);
6305 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
6306 ItemTypeTraits::AccessNext(item) = VMA_NULL;
6310 ItemType* m_Front = VMA_NULL;
6311 ItemType* m_Back = VMA_NULL;
6321 #if VMA_USE_STL_UNORDERED_MAP
6323 #define VmaPair std::pair
6325 #define VMA_MAP_TYPE(KeyT, ValueT) \
6326 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
// Minimal std::pair replacement used when VMA_USE_STL_UNORDERED_MAP is off.
// Default constructor value-initializes both members.
template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
6343 template<
typename KeyT,
typename ValueT>
6347 typedef VmaPair<KeyT, ValueT> PairType;
6348 typedef PairType* iterator;
6350 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
6352 iterator begin() {
return m_Vector.begin(); }
6353 iterator end() {
return m_Vector.end(); }
6355 void insert(
const PairType& pair);
6356 iterator find(
const KeyT& key);
6357 void erase(iterator it);
6360 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
6363 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
6365 template<
typename FirstT,
typename SecondT>
6366 struct VmaPairFirstLess
6368 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const
6370 return lhs.first < rhs.first;
6372 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const
6374 return lhs.first < rhsFirst;
6378 template<
typename KeyT,
typename ValueT>
6379 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
6381 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
6383 m_Vector.data() + m_Vector.size(),
6385 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
6386 VmaVectorInsert(m_Vector, indexToInsert, pair);
6389 template<
typename KeyT,
typename ValueT>
6390 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
6392 PairType* it = VmaBinaryFindFirstNotLess(
6394 m_Vector.data() + m_Vector.size(),
6396 VmaPairFirstLess<KeyT, ValueT>());
6397 if((it != m_Vector.end()) && (it->first == key))
6403 return m_Vector.end();
6407 template<
typename KeyT,
typename ValueT>
6408 void VmaMap<KeyT, ValueT>::erase(iterator it)
6410 VmaVectorRemove(m_Vector, it - m_Vector.begin());
// Forward declaration; full definition appears later in the file.
class VmaDeviceMemoryBlock;

// Direction of a cache maintenance operation on mapped memory.
enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
6423 struct VmaAllocation_T
6426 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
6430 FLAG_USER_DATA_STRING = 0x01,
6434 enum ALLOCATION_TYPE
6436 ALLOCATION_TYPE_NONE,
6437 ALLOCATION_TYPE_BLOCK,
6438 ALLOCATION_TYPE_DEDICATED,
6445 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
6448 m_pUserData{VMA_NULL},
6449 m_LastUseFrameIndex{currentFrameIndex},
6450 m_MemoryTypeIndex{0},
6451 m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
6452 m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
6454 m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
6456 #if VMA_STATS_STRING_ENABLED
6457 m_CreationFrameIndex = currentFrameIndex;
6458 m_BufferImageUsage = 0;
6464 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
6467 VMA_ASSERT(m_pUserData == VMA_NULL);
6470 void InitBlockAllocation(
6471 VmaDeviceMemoryBlock* block,
6472 VkDeviceSize offset,
6473 VkDeviceSize alignment,
6475 uint32_t memoryTypeIndex,
6476 VmaSuballocationType suballocationType,
6480 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6481 VMA_ASSERT(block != VMA_NULL);
6482 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6483 m_Alignment = alignment;
6485 m_MemoryTypeIndex = memoryTypeIndex;
6486 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6487 m_SuballocationType = (uint8_t)suballocationType;
6488 m_BlockAllocation.m_Block = block;
6489 m_BlockAllocation.m_Offset = offset;
6490 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
6495 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6496 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
6497 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6498 m_MemoryTypeIndex = 0;
6499 m_BlockAllocation.m_Block = VMA_NULL;
6500 m_BlockAllocation.m_Offset = 0;
6501 m_BlockAllocation.m_CanBecomeLost =
true;
6504 void ChangeBlockAllocation(
6506 VmaDeviceMemoryBlock* block,
6507 VkDeviceSize offset);
6509 void ChangeOffset(VkDeviceSize newOffset);
6512 void InitDedicatedAllocation(
6513 uint32_t memoryTypeIndex,
6514 VkDeviceMemory hMemory,
6515 VmaSuballocationType suballocationType,
6519 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6520 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
6521 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
6524 m_MemoryTypeIndex = memoryTypeIndex;
6525 m_SuballocationType = (uint8_t)suballocationType;
6526 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6527 m_DedicatedAllocation.m_hMemory = hMemory;
6528 m_DedicatedAllocation.m_pMappedData = pMappedData;
6529 m_DedicatedAllocation.m_Prev = VMA_NULL;
6530 m_DedicatedAllocation.m_Next = VMA_NULL;
6533 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
6534 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
6535 VkDeviceSize GetSize()
const {
return m_Size; }
6536 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
6537 void* GetUserData()
const {
return m_pUserData; }
6538 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
6539 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
6541 VmaDeviceMemoryBlock* GetBlock()
const
6543 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6544 return m_BlockAllocation.m_Block;
6546 VkDeviceSize GetOffset()
const;
6547 VkDeviceMemory GetMemory()
const;
6548 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
6549 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
6550 void* GetMappedData()
const;
6551 bool CanBecomeLost()
const;
6553 uint32_t GetLastUseFrameIndex()
const
6555 return m_LastUseFrameIndex.load();
6557 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
6559 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
6569 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6571 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
6573 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
6584 void BlockAllocMap();
6585 void BlockAllocUnmap();
6586 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
6589 #if VMA_STATS_STRING_ENABLED
6590 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
6591 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
6593 void InitBufferImageUsage(uint32_t bufferImageUsage)
6595 VMA_ASSERT(m_BufferImageUsage == 0);
6596 m_BufferImageUsage = bufferImageUsage;
6599 void PrintParameters(
class VmaJsonWriter& json)
const;
6603 VkDeviceSize m_Alignment;
6604 VkDeviceSize m_Size;
6606 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
6607 uint32_t m_MemoryTypeIndex;
6609 uint8_t m_SuballocationType;
6616 struct BlockAllocation
6618 VmaDeviceMemoryBlock* m_Block;
6619 VkDeviceSize m_Offset;
6620 bool m_CanBecomeLost;
6624 struct DedicatedAllocation
6626 VkDeviceMemory m_hMemory;
6627 void* m_pMappedData;
6628 VmaAllocation_T* m_Prev;
6629 VmaAllocation_T* m_Next;
6635 BlockAllocation m_BlockAllocation;
6637 DedicatedAllocation m_DedicatedAllocation;
6640 #if VMA_STATS_STRING_ENABLED
6641 uint32_t m_CreationFrameIndex;
6642 uint32_t m_BufferImageUsage;
6647 friend struct VmaDedicatedAllocationListItemTraits;
6650 struct VmaDedicatedAllocationListItemTraits
6652 typedef VmaAllocation_T ItemType;
6653 static ItemType* GetPrev(
const ItemType* item)
6655 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6656 return item->m_DedicatedAllocation.m_Prev;
6658 static ItemType* GetNext(
const ItemType* item)
6660 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6661 return item->m_DedicatedAllocation.m_Next;
6663 static ItemType*& AccessPrev(ItemType* item)
6665 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6666 return item->m_DedicatedAllocation.m_Prev;
6668 static ItemType*& AccessNext(ItemType* item){
6669 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6670 return item->m_DedicatedAllocation.m_Next;
6678 struct VmaSuballocation
6680 VkDeviceSize offset;
6683 VmaSuballocationType type;
6687 struct VmaSuballocationOffsetLess
6689 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6691 return lhs.offset < rhs.offset;
6694 struct VmaSuballocationOffsetGreater
6696 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6698 return lhs.offset > rhs.offset;
6702 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
6705 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
6707 enum class VmaAllocationRequestType
6729 struct VmaAllocationRequest
6731 VkDeviceSize offset;
6732 VkDeviceSize sumFreeSize;
6733 VkDeviceSize sumItemSize;
6734 VmaSuballocationList::iterator item;
6735 size_t itemsToMakeLostCount;
6737 VmaAllocationRequestType type;
6739 VkDeviceSize CalcCost()
const
6741 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
6749 class VmaBlockMetadata
6753 virtual ~VmaBlockMetadata() { }
6754 virtual void Init(VkDeviceSize size) { m_Size = size; }
6757 virtual bool Validate()
const = 0;
6758 VkDeviceSize GetSize()
const {
return m_Size; }
6759 virtual size_t GetAllocationCount()
const = 0;
6760 virtual VkDeviceSize GetSumFreeSize()
const = 0;
6761 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
6763 virtual bool IsEmpty()
const = 0;
6765 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
6767 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
6769 #if VMA_STATS_STRING_ENABLED
6770 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
6776 virtual bool CreateAllocationRequest(
6777 uint32_t currentFrameIndex,
6778 uint32_t frameInUseCount,
6779 VkDeviceSize bufferImageGranularity,
6780 VkDeviceSize allocSize,
6781 VkDeviceSize allocAlignment,
6783 VmaSuballocationType allocType,
6784 bool canMakeOtherLost,
6787 VmaAllocationRequest* pAllocationRequest) = 0;
6789 virtual bool MakeRequestedAllocationsLost(
6790 uint32_t currentFrameIndex,
6791 uint32_t frameInUseCount,
6792 VmaAllocationRequest* pAllocationRequest) = 0;
6794 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
6796 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
6800 const VmaAllocationRequest& request,
6801 VmaSuballocationType type,
6802 VkDeviceSize allocSize,
6807 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
6810 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
6812 #if VMA_STATS_STRING_ENABLED
6813 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6814 VkDeviceSize unusedBytes,
6815 size_t allocationCount,
6816 size_t unusedRangeCount)
const;
6817 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6818 VkDeviceSize offset,
6820 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6821 VkDeviceSize offset,
6822 VkDeviceSize size)
const;
6823 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
6827 VkDeviceSize m_Size;
6828 const VkAllocationCallbacks* m_pAllocationCallbacks;
// Used inside Validate() implementations: on a failed condition, asserts with
// the stringized condition and makes the enclosing function return false.
#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
6836 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
6838 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
6841 virtual ~VmaBlockMetadata_Generic();
6842 virtual void Init(VkDeviceSize size);
6844 virtual bool Validate()
const;
6845 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
6846 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
6847 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6848 virtual bool IsEmpty()
const;
6850 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6851 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6853 #if VMA_STATS_STRING_ENABLED
6854 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
6857 virtual bool CreateAllocationRequest(
6858 uint32_t currentFrameIndex,
6859 uint32_t frameInUseCount,
6860 VkDeviceSize bufferImageGranularity,
6861 VkDeviceSize allocSize,
6862 VkDeviceSize allocAlignment,
6864 VmaSuballocationType allocType,
6865 bool canMakeOtherLost,
6867 VmaAllocationRequest* pAllocationRequest);
6869 virtual bool MakeRequestedAllocationsLost(
6870 uint32_t currentFrameIndex,
6871 uint32_t frameInUseCount,
6872 VmaAllocationRequest* pAllocationRequest);
6874 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6876 virtual VkResult CheckCorruption(
const void* pBlockData);
6879 const VmaAllocationRequest& request,
6880 VmaSuballocationType type,
6881 VkDeviceSize allocSize,
6885 virtual void FreeAtOffset(VkDeviceSize offset);
6890 bool IsBufferImageGranularityConflictPossible(
6891 VkDeviceSize bufferImageGranularity,
6892 VmaSuballocationType& inOutPrevSuballocType)
const;
6895 friend class VmaDefragmentationAlgorithm_Generic;
6896 friend class VmaDefragmentationAlgorithm_Fast;
6898 uint32_t m_FreeCount;
6899 VkDeviceSize m_SumFreeSize;
6900 VmaSuballocationList m_Suballocations;
6903 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
6905 bool ValidateFreeSuballocationList()
const;
6909 bool CheckAllocation(
6910 uint32_t currentFrameIndex,
6911 uint32_t frameInUseCount,
6912 VkDeviceSize bufferImageGranularity,
6913 VkDeviceSize allocSize,
6914 VkDeviceSize allocAlignment,
6915 VmaSuballocationType allocType,
6916 VmaSuballocationList::const_iterator suballocItem,
6917 bool canMakeOtherLost,
6918 VkDeviceSize* pOffset,
6919 size_t* itemsToMakeLostCount,
6920 VkDeviceSize* pSumFreeSize,
6921 VkDeviceSize* pSumItemSize)
const;
6923 void MergeFreeWithNext(VmaSuballocationList::iterator item);
6927 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
6930 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
6933 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
7014 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
7016 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
7019 virtual ~VmaBlockMetadata_Linear();
7020 virtual void Init(VkDeviceSize size);
7022 virtual bool Validate()
const;
7023 virtual size_t GetAllocationCount()
const;
7024 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
7025 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
7026 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
7028 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
7029 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
7031 #if VMA_STATS_STRING_ENABLED
7032 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
7035 virtual bool CreateAllocationRequest(
7036 uint32_t currentFrameIndex,
7037 uint32_t frameInUseCount,
7038 VkDeviceSize bufferImageGranularity,
7039 VkDeviceSize allocSize,
7040 VkDeviceSize allocAlignment,
7042 VmaSuballocationType allocType,
7043 bool canMakeOtherLost,
7045 VmaAllocationRequest* pAllocationRequest);
7047 virtual bool MakeRequestedAllocationsLost(
7048 uint32_t currentFrameIndex,
7049 uint32_t frameInUseCount,
7050 VmaAllocationRequest* pAllocationRequest);
7052 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
7054 virtual VkResult CheckCorruption(
const void* pBlockData);
7057 const VmaAllocationRequest& request,
7058 VmaSuballocationType type,
7059 VkDeviceSize allocSize,
7063 virtual void FreeAtOffset(VkDeviceSize offset);
7073 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
7075 enum SECOND_VECTOR_MODE
7077 SECOND_VECTOR_EMPTY,
7082 SECOND_VECTOR_RING_BUFFER,
7088 SECOND_VECTOR_DOUBLE_STACK,
7091 VkDeviceSize m_SumFreeSize;
7092 SuballocationVectorType m_Suballocations0, m_Suballocations1;
7093 uint32_t m_1stVectorIndex;
7094 SECOND_VECTOR_MODE m_2ndVectorMode;
7096 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7097 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
7098 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7099 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
7102 size_t m_1stNullItemsBeginCount;
7104 size_t m_1stNullItemsMiddleCount;
7106 size_t m_2ndNullItemsCount;
7108 bool ShouldCompact1st()
const;
7109 void CleanupAfterFree();
7111 bool CreateAllocationRequest_LowerAddress(
7112 uint32_t currentFrameIndex,
7113 uint32_t frameInUseCount,
7114 VkDeviceSize bufferImageGranularity,
7115 VkDeviceSize allocSize,
7116 VkDeviceSize allocAlignment,
7117 VmaSuballocationType allocType,
7118 bool canMakeOtherLost,
7120 VmaAllocationRequest* pAllocationRequest);
7121 bool CreateAllocationRequest_UpperAddress(
7122 uint32_t currentFrameIndex,
7123 uint32_t frameInUseCount,
7124 VkDeviceSize bufferImageGranularity,
7125 VkDeviceSize allocSize,
7126 VkDeviceSize allocAlignment,
7127 VmaSuballocationType allocType,
7128 bool canMakeOtherLost,
7130 VmaAllocationRequest* pAllocationRequest);
7144 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
7146 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
7149 virtual ~VmaBlockMetadata_Buddy();
7150 virtual void Init(VkDeviceSize size);
7152 virtual bool Validate()
const;
7153 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
7154 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
7155 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
7156 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
7158 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
7159 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
7161 #if VMA_STATS_STRING_ENABLED
7162 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
7165 virtual bool CreateAllocationRequest(
7166 uint32_t currentFrameIndex,
7167 uint32_t frameInUseCount,
7168 VkDeviceSize bufferImageGranularity,
7169 VkDeviceSize allocSize,
7170 VkDeviceSize allocAlignment,
7172 VmaSuballocationType allocType,
7173 bool canMakeOtherLost,
7175 VmaAllocationRequest* pAllocationRequest);
7177 virtual bool MakeRequestedAllocationsLost(
7178 uint32_t currentFrameIndex,
7179 uint32_t frameInUseCount,
7180 VmaAllocationRequest* pAllocationRequest);
7182 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
7184 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
7187 const VmaAllocationRequest& request,
7188 VmaSuballocationType type,
7189 VkDeviceSize allocSize,
7192 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
7193 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
7196 static const VkDeviceSize MIN_NODE_SIZE = 32;
7197 static const size_t MAX_LEVELS = 30;
7199 struct ValidationContext
7201 size_t calculatedAllocationCount;
7202 size_t calculatedFreeCount;
7203 VkDeviceSize calculatedSumFreeSize;
7205 ValidationContext() :
7206 calculatedAllocationCount(0),
7207 calculatedFreeCount(0),
7208 calculatedSumFreeSize(0) { }
7213 VkDeviceSize offset;
7243 VkDeviceSize m_UsableSize;
7244 uint32_t m_LevelCount;
7250 } m_FreeList[MAX_LEVELS];
7252 size_t m_AllocationCount;
7256 VkDeviceSize m_SumFreeSize;
7258 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
7259 void DeleteNode(Node* node);
7260 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
7261 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
7262 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
7264 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
7265 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
7269 void AddToFreeListFront(uint32_t level, Node* node);
7273 void RemoveFromFreeList(uint32_t level, Node* node);
7275 #if VMA_STATS_STRING_ENABLED
7276 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
7286 class VmaDeviceMemoryBlock
7288 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
7290 VmaBlockMetadata* m_pMetadata;
7294 ~VmaDeviceMemoryBlock()
7296 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
7297 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
7304 uint32_t newMemoryTypeIndex,
7305 VkDeviceMemory newMemory,
7306 VkDeviceSize newSize,
7308 uint32_t algorithm);
7312 VmaPool GetParentPool()
const {
return m_hParentPool; }
7313 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
7314 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7315 uint32_t GetId()
const {
return m_Id; }
7316 void* GetMappedData()
const {
return m_pMappedData; }
7319 bool Validate()
const;
7324 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
7327 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
7328 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
7330 VkResult BindBufferMemory(
7333 VkDeviceSize allocationLocalOffset,
7336 VkResult BindImageMemory(
7339 VkDeviceSize allocationLocalOffset,
7345 uint32_t m_MemoryTypeIndex;
7347 VkDeviceMemory m_hMemory;
7355 uint32_t m_MapCount;
7356 void* m_pMappedData;
7359 struct VmaDefragmentationMove
7361 size_t srcBlockIndex;
7362 size_t dstBlockIndex;
7363 VkDeviceSize srcOffset;
7364 VkDeviceSize dstOffset;
7367 VmaDeviceMemoryBlock* pSrcBlock;
7368 VmaDeviceMemoryBlock* pDstBlock;
7371 class VmaDefragmentationAlgorithm;
7379 struct VmaBlockVector
7381 VMA_CLASS_NO_COPY(VmaBlockVector)
7386 uint32_t memoryTypeIndex,
7387 VkDeviceSize preferredBlockSize,
7388 size_t minBlockCount,
7389 size_t maxBlockCount,
7390 VkDeviceSize bufferImageGranularity,
7391 uint32_t frameInUseCount,
7392 bool explicitBlockSize,
7395 VkDeviceSize minAllocationAlignment,
7396 void* pMemoryAllocateNext);
7399 VkResult CreateMinBlocks();
7401 VmaAllocator GetAllocator()
const {
return m_hAllocator; }
7402 VmaPool GetParentPool()
const {
return m_hParentPool; }
7403 bool IsCustomPool()
const {
return m_hParentPool != VMA_NULL; }
7404 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7405 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
7406 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
7407 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
7408 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
7413 bool IsCorruptionDetectionEnabled()
const;
7416 uint32_t currentFrameIndex,
7418 VkDeviceSize alignment,
7420 VmaSuballocationType suballocType,
7421 size_t allocationCount,
7429 #if VMA_STATS_STRING_ENABLED
7430 void PrintDetailedMap(
class VmaJsonWriter& json);
7433 void MakePoolAllocationsLost(
7434 uint32_t currentFrameIndex,
7435 size_t* pLostAllocationCount);
7436 VkResult CheckCorruption();
7440 class VmaBlockVectorDefragmentationContext* pCtx,
7442 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
7443 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
7444 VkCommandBuffer commandBuffer);
7445 void DefragmentationEnd(
7446 class VmaBlockVectorDefragmentationContext* pCtx,
7450 uint32_t ProcessDefragmentations(
7451 class VmaBlockVectorDefragmentationContext *pCtx,
7454 void CommitDefragmentations(
7455 class VmaBlockVectorDefragmentationContext *pCtx,
7461 size_t GetBlockCount()
const {
return m_Blocks.size(); }
7462 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
7463 size_t CalcAllocationCount()
const;
7464 bool IsBufferImageGranularityConflictPossible()
const;
7467 friend class VmaDefragmentationAlgorithm_Generic;
7471 const uint32_t m_MemoryTypeIndex;
7472 const VkDeviceSize m_PreferredBlockSize;
7473 const size_t m_MinBlockCount;
7474 const size_t m_MaxBlockCount;
7475 const VkDeviceSize m_BufferImageGranularity;
7476 const uint32_t m_FrameInUseCount;
7477 const bool m_ExplicitBlockSize;
7478 const uint32_t m_Algorithm;
7479 const float m_Priority;
7480 const VkDeviceSize m_MinAllocationAlignment;
7481 void*
const m_pMemoryAllocateNext;
7482 VMA_RW_MUTEX m_Mutex;
7486 bool m_HasEmptyBlock;
7488 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
7489 uint32_t m_NextBlockId;
7491 VkDeviceSize CalcMaxBlockSize()
const;
7494 void Remove(VmaDeviceMemoryBlock* pBlock);
7498 void IncrementallySortBlocks();
7500 VkResult AllocatePage(
7501 uint32_t currentFrameIndex,
7503 VkDeviceSize alignment,
7505 VmaSuballocationType suballocType,
7509 VkResult AllocateFromBlock(
7510 VmaDeviceMemoryBlock* pBlock,
7511 uint32_t currentFrameIndex,
7513 VkDeviceSize alignment,
7516 VmaSuballocationType suballocType,
7520 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
7523 void ApplyDefragmentationMovesCpu(
7524 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7525 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
7527 void ApplyDefragmentationMovesGpu(
7528 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7529 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7530 VkCommandBuffer commandBuffer);
7538 void UpdateHasEmptyBlock();
7543 VMA_CLASS_NO_COPY(VmaPool_T)
7545 VmaBlockVector m_BlockVector;
7550 VkDeviceSize preferredBlockSize);
7553 uint32_t GetId()
const {
return m_Id; }
7554 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
7556 const char* GetName()
const {
return m_Name; }
7557 void SetName(
const char* pName);
7559 #if VMA_STATS_STRING_ENABLED
7566 VmaPool_T* m_PrevPool = VMA_NULL;
7567 VmaPool_T* m_NextPool = VMA_NULL;
7568 friend struct VmaPoolListItemTraits;
7571 struct VmaPoolListItemTraits
7573 typedef VmaPool_T ItemType;
7574 static ItemType* GetPrev(
const ItemType* item) {
return item->m_PrevPool; }
7575 static ItemType* GetNext(
const ItemType* item) {
return item->m_NextPool; }
7576 static ItemType*& AccessPrev(ItemType* item) {
return item->m_PrevPool; }
7577 static ItemType*& AccessNext(ItemType* item) {
return item->m_NextPool; }
7587 class VmaDefragmentationAlgorithm
7589 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
7591 VmaDefragmentationAlgorithm(
7593 VmaBlockVector* pBlockVector,
7594 uint32_t currentFrameIndex) :
7595 m_hAllocator(hAllocator),
7596 m_pBlockVector(pBlockVector),
7597 m_CurrentFrameIndex(currentFrameIndex)
7600 virtual ~VmaDefragmentationAlgorithm()
7604 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
7605 virtual void AddAll() = 0;
7607 virtual VkResult Defragment(
7608 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7609 VkDeviceSize maxBytesToMove,
7610 uint32_t maxAllocationsToMove,
7613 virtual VkDeviceSize GetBytesMoved()
const = 0;
7614 virtual uint32_t GetAllocationsMoved()
const = 0;
7618 VmaBlockVector*
const m_pBlockVector;
7619 const uint32_t m_CurrentFrameIndex;
7621 struct AllocationInfo
7624 VkBool32* m_pChanged;
7627 m_hAllocation(VK_NULL_HANDLE),
7628 m_pChanged(VMA_NULL)
7632 m_hAllocation(hAlloc),
7633 m_pChanged(pChanged)
7639 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
7641 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
7643 VmaDefragmentationAlgorithm_Generic(
7645 VmaBlockVector* pBlockVector,
7646 uint32_t currentFrameIndex,
7647 bool overlappingMoveSupported);
7648 virtual ~VmaDefragmentationAlgorithm_Generic();
7650 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7651 virtual void AddAll() { m_AllAllocations =
true; }
7653 virtual VkResult Defragment(
7654 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7655 VkDeviceSize maxBytesToMove,
7656 uint32_t maxAllocationsToMove,
7659 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7660 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7663 uint32_t m_AllocationCount;
7664 bool m_AllAllocations;
7666 VkDeviceSize m_BytesMoved;
7667 uint32_t m_AllocationsMoved;
7669 struct AllocationInfoSizeGreater
7671 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7673 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
7677 struct AllocationInfoOffsetGreater
7679 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7681 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
7687 size_t m_OriginalBlockIndex;
7688 VmaDeviceMemoryBlock* m_pBlock;
7689 bool m_HasNonMovableAllocations;
7690 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
7692 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
7693 m_OriginalBlockIndex(SIZE_MAX),
7695 m_HasNonMovableAllocations(true),
7696 m_Allocations(pAllocationCallbacks)
7700 void CalcHasNonMovableAllocations()
7702 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
7703 const size_t defragmentAllocCount = m_Allocations.size();
7704 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
7707 void SortAllocationsBySizeDescending()
7709 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
7712 void SortAllocationsByOffsetDescending()
7714 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
7718 struct BlockPointerLess
7720 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const
7722 return pLhsBlockInfo->m_pBlock < pRhsBlock;
7724 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7726 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
7732 struct BlockInfoCompareMoveDestination
7734 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7736 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
7740 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
7744 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
7752 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
7753 BlockInfoVector m_Blocks;
7755 VkResult DefragmentRound(
7756 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7757 VkDeviceSize maxBytesToMove,
7758 uint32_t maxAllocationsToMove,
7759 bool freeOldAllocations);
7761 size_t CalcBlocksWithNonMovableCount()
const;
7763 static bool MoveMakesSense(
7764 size_t dstBlockIndex, VkDeviceSize dstOffset,
7765 size_t srcBlockIndex, VkDeviceSize srcOffset);
7768 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
7770 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
7772 VmaDefragmentationAlgorithm_Fast(
7774 VmaBlockVector* pBlockVector,
7775 uint32_t currentFrameIndex,
7776 bool overlappingMoveSupported);
7777 virtual ~VmaDefragmentationAlgorithm_Fast();
7779 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
7780 virtual void AddAll() { m_AllAllocations =
true; }
7782 virtual VkResult Defragment(
7783 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7784 VkDeviceSize maxBytesToMove,
7785 uint32_t maxAllocationsToMove,
7788 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7789 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7794 size_t origBlockIndex;
7797 class FreeSpaceDatabase
7803 s.blockInfoIndex = SIZE_MAX;
7804 for(
size_t i = 0; i < MAX_COUNT; ++i)
7806 m_FreeSpaces[i] = s;
7810 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
7812 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7818 size_t bestIndex = SIZE_MAX;
7819 for(
size_t i = 0; i < MAX_COUNT; ++i)
7822 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
7827 if(m_FreeSpaces[i].size < size &&
7828 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
7834 if(bestIndex != SIZE_MAX)
7836 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
7837 m_FreeSpaces[bestIndex].offset = offset;
7838 m_FreeSpaces[bestIndex].size = size;
7842 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
7843 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
7845 size_t bestIndex = SIZE_MAX;
7846 VkDeviceSize bestFreeSpaceAfter = 0;
7847 for(
size_t i = 0; i < MAX_COUNT; ++i)
7850 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
7852 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
7854 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
7856 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
7858 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
7861 bestFreeSpaceAfter = freeSpaceAfter;
7867 if(bestIndex != SIZE_MAX)
7869 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
7870 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
7872 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7875 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
7876 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
7877 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
7882 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
7892 static const size_t MAX_COUNT = 4;
7896 size_t blockInfoIndex;
7897 VkDeviceSize offset;
7899 } m_FreeSpaces[MAX_COUNT];
7902 const bool m_OverlappingMoveSupported;
7904 uint32_t m_AllocationCount;
7905 bool m_AllAllocations;
7907 VkDeviceSize m_BytesMoved;
7908 uint32_t m_AllocationsMoved;
7910 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
7912 void PreprocessMetadata();
7913 void PostprocessMetadata();
7914 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
7917 struct VmaBlockDefragmentationContext
7921 BLOCK_FLAG_USED = 0x00000001,
7927 class VmaBlockVectorDefragmentationContext
7929 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
7933 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
7934 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
7935 uint32_t defragmentationMovesProcessed;
7936 uint32_t defragmentationMovesCommitted;
7937 bool hasDefragmentationPlan;
7939 VmaBlockVectorDefragmentationContext(
7942 VmaBlockVector* pBlockVector,
7943 uint32_t currFrameIndex);
7944 ~VmaBlockVectorDefragmentationContext();
7946 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
7947 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
7948 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
7950 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7951 void AddAll() { m_AllAllocations =
true; }
7960 VmaBlockVector*
const m_pBlockVector;
7961 const uint32_t m_CurrFrameIndex;
7963 VmaDefragmentationAlgorithm* m_pAlgorithm;
7971 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
7972 bool m_AllAllocations;
7975 struct VmaDefragmentationContext_T
7978 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
7980 VmaDefragmentationContext_T(
7982 uint32_t currFrameIndex,
7985 ~VmaDefragmentationContext_T();
7987 void AddPools(uint32_t poolCount,
const VmaPool* pPools);
7988 void AddAllocations(
7989 uint32_t allocationCount,
7991 VkBool32* pAllocationsChanged);
7999 VkResult Defragment(
8000 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
8001 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
8005 VkResult DefragmentPassEnd();
8009 const uint32_t m_CurrFrameIndex;
8010 const uint32_t m_Flags;
8013 VkDeviceSize m_MaxCpuBytesToMove;
8014 uint32_t m_MaxCpuAllocationsToMove;
8015 VkDeviceSize m_MaxGpuBytesToMove;
8016 uint32_t m_MaxGpuAllocationsToMove;
8019 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
8021 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
8024 #if VMA_RECORDING_ENABLED
8031 void WriteConfiguration(
8032 const VkPhysicalDeviceProperties& devProps,
8033 const VkPhysicalDeviceMemoryProperties& memProps,
8034 uint32_t vulkanApiVersion,
8035 bool dedicatedAllocationExtensionEnabled,
8036 bool bindMemory2ExtensionEnabled,
8037 bool memoryBudgetExtensionEnabled,
8038 bool deviceCoherentMemoryExtensionEnabled);
8041 void RecordCreateAllocator(uint32_t frameIndex);
8042 void RecordDestroyAllocator(uint32_t frameIndex);
8043 void RecordCreatePool(uint32_t frameIndex,
8046 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
8047 void RecordAllocateMemory(uint32_t frameIndex,
8048 const VkMemoryRequirements& vkMemReq,
8051 void RecordAllocateMemoryPages(uint32_t frameIndex,
8052 const VkMemoryRequirements& vkMemReq,
8054 uint64_t allocationCount,
8056 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
8057 const VkMemoryRequirements& vkMemReq,
8058 bool requiresDedicatedAllocation,
8059 bool prefersDedicatedAllocation,
8062 void RecordAllocateMemoryForImage(uint32_t frameIndex,
8063 const VkMemoryRequirements& vkMemReq,
8064 bool requiresDedicatedAllocation,
8065 bool prefersDedicatedAllocation,
8068 void RecordFreeMemory(uint32_t frameIndex,
8070 void RecordFreeMemoryPages(uint32_t frameIndex,
8071 uint64_t allocationCount,
8073 void RecordSetAllocationUserData(uint32_t frameIndex,
8075 const void* pUserData);
8076 void RecordCreateLostAllocation(uint32_t frameIndex,
8078 void RecordMapMemory(uint32_t frameIndex,
8080 void RecordUnmapMemory(uint32_t frameIndex,
8082 void RecordFlushAllocation(uint32_t frameIndex,
8083 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
8084 void RecordInvalidateAllocation(uint32_t frameIndex,
8085 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
8086 void RecordCreateBuffer(uint32_t frameIndex,
8087 const VkBufferCreateInfo& bufCreateInfo,
8090 void RecordCreateImage(uint32_t frameIndex,
8091 const VkImageCreateInfo& imageCreateInfo,
8094 void RecordDestroyBuffer(uint32_t frameIndex,
8096 void RecordDestroyImage(uint32_t frameIndex,
8098 void RecordTouchAllocation(uint32_t frameIndex,
8100 void RecordGetAllocationInfo(uint32_t frameIndex,
8102 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
8104 void RecordDefragmentationBegin(uint32_t frameIndex,
8107 void RecordDefragmentationEnd(uint32_t frameIndex,
8109 void RecordSetPoolName(uint32_t frameIndex,
8120 class UserDataString
8124 const char* GetString()
const {
return m_Str; }
8134 VMA_MUTEX m_FileMutex;
8135 std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;
8137 void GetBasicParams(CallParams& outParams);
8140 template<
typename T>
8141 void PrintPointerList(uint64_t count,
const T* pItems)
8145 fprintf(m_File,
"%p", pItems[0]);
8146 for(uint64_t i = 1; i < count; ++i)
8148 fprintf(m_File,
" %p", pItems[i]);
8153 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
8162 class VmaAllocationObjectAllocator
8164 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
8166 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
8168 template<
typename... Types>
VmaAllocation Allocate(Types... args);
8173 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
8176 struct VmaCurrentBudgetData
8178 VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
8179 VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
8181 #if VMA_MEMORY_BUDGET
8182 VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
8183 VMA_RW_MUTEX m_BudgetMutex;
8184 uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
8185 uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
8186 uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
8189 VmaCurrentBudgetData()
8191 for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
8193 m_BlockBytes[heapIndex] = 0;
8194 m_AllocationBytes[heapIndex] = 0;
8195 #if VMA_MEMORY_BUDGET
8196 m_VulkanUsage[heapIndex] = 0;
8197 m_VulkanBudget[heapIndex] = 0;
8198 m_BlockBytesAtBudgetFetch[heapIndex] = 0;
8202 #if VMA_MEMORY_BUDGET
8203 m_OperationsSinceBudgetFetch = 0;
8207 void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
8209 m_AllocationBytes[heapIndex] += allocationSize;
8210 #if VMA_MEMORY_BUDGET
8211 ++m_OperationsSinceBudgetFetch;
8215 void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
8217 VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
8218 m_AllocationBytes[heapIndex] -= allocationSize;
8219 #if VMA_MEMORY_BUDGET
8220 ++m_OperationsSinceBudgetFetch;
8226 struct VmaAllocator_T
8228 VMA_CLASS_NO_COPY(VmaAllocator_T)
8231 uint32_t m_VulkanApiVersion;
8232 bool m_UseKhrDedicatedAllocation;
8233 bool m_UseKhrBindMemory2;
8234 bool m_UseExtMemoryBudget;
8235 bool m_UseAmdDeviceCoherentMemory;
8236 bool m_UseKhrBufferDeviceAddress;
8237 bool m_UseExtMemoryPriority;
8239 VkInstance m_hInstance;
8240 bool m_AllocationCallbacksSpecified;
8241 VkAllocationCallbacks m_AllocationCallbacks;
8243 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
8246 uint32_t m_HeapSizeLimitMask;
8248 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
8249 VkPhysicalDeviceMemoryProperties m_MemProps;
8252 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
8254 typedef VmaIntrusiveLinkedList<VmaDedicatedAllocationListItemTraits> DedicatedAllocationLinkedList;
8255 DedicatedAllocationLinkedList m_DedicatedAllocations[VK_MAX_MEMORY_TYPES];
8256 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
8258 VmaCurrentBudgetData m_Budget;
8259 VMA_ATOMIC_UINT32 m_DeviceMemoryCount;
8265 const VkAllocationCallbacks* GetAllocationCallbacks()
const
8267 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
8271 return m_VulkanFunctions;
8274 VkPhysicalDevice GetPhysicalDevice()
const {
return m_PhysicalDevice; }
8276 VkDeviceSize GetBufferImageGranularity()
const
8279 static_cast<VkDeviceSize
>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
8280 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
8283 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
8284 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
8286 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const
8288 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
8289 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
8292 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const
8294 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
8295 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8298 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const
8300 return IsMemoryTypeNonCoherent(memTypeIndex) ?
8301 VMA_MAX((VkDeviceSize)VMA_MIN_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
8302 (VkDeviceSize)VMA_MIN_ALIGNMENT;
8305 bool IsIntegratedGpu()
const
8307 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
8310 uint32_t GetGlobalMemoryTypeBits()
const {
return m_GlobalMemoryTypeBits; }
8312 #if VMA_RECORDING_ENABLED
8313 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
8316 void GetBufferMemoryRequirements(
8318 VkMemoryRequirements& memReq,
8319 bool& requiresDedicatedAllocation,
8320 bool& prefersDedicatedAllocation)
const;
8321 void GetImageMemoryRequirements(
8323 VkMemoryRequirements& memReq,
8324 bool& requiresDedicatedAllocation,
8325 bool& prefersDedicatedAllocation)
const;
8328 VkResult AllocateMemory(
8329 const VkMemoryRequirements& vkMemReq,
8330 bool requiresDedicatedAllocation,
8331 bool prefersDedicatedAllocation,
8332 VkBuffer dedicatedBuffer,
8333 VkBufferUsageFlags dedicatedBufferUsage,
8334 VkImage dedicatedImage,
8336 VmaSuballocationType suballocType,
8337 size_t allocationCount,
8342 size_t allocationCount,
8345 void CalculateStats(
VmaStats* pStats);
8348 VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);
8350 #if VMA_STATS_STRING_ENABLED
8351 void PrintDetailedMap(
class VmaJsonWriter& json);
8354 VkResult DefragmentationBegin(
8358 VkResult DefragmentationEnd(
8361 VkResult DefragmentationPassBegin(
8364 VkResult DefragmentationPassEnd(
8371 void DestroyPool(
VmaPool pool);
8374 void SetCurrentFrameIndex(uint32_t frameIndex);
8375 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
8377 void MakePoolAllocationsLost(
8379 size_t* pLostAllocationCount);
8380 VkResult CheckPoolCorruption(
VmaPool hPool);
8381 VkResult CheckCorruption(uint32_t memoryTypeBits);
8386 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
8388 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
8390 VkResult BindVulkanBuffer(
8391 VkDeviceMemory memory,
8392 VkDeviceSize memoryOffset,
8396 VkResult BindVulkanImage(
8397 VkDeviceMemory memory,
8398 VkDeviceSize memoryOffset,
8405 VkResult BindBufferMemory(
8407 VkDeviceSize allocationLocalOffset,
8410 VkResult BindImageMemory(
8412 VkDeviceSize allocationLocalOffset,
8416 VkResult FlushOrInvalidateAllocation(
8418 VkDeviceSize offset, VkDeviceSize size,
8419 VMA_CACHE_OPERATION op);
8420 VkResult FlushOrInvalidateAllocations(
8421 uint32_t allocationCount,
8423 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
8424 VMA_CACHE_OPERATION op);
8426 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
8432 uint32_t GetGpuDefragmentationMemoryTypeBits();
8434 #if VMA_EXTERNAL_MEMORY
8435 VkExternalMemoryHandleTypeFlagsKHR GetExternalMemoryHandleTypeFlags(uint32_t memTypeIndex)
const
8437 return m_TypeExternalMemoryHandleTypes[memTypeIndex];
8442 VkDeviceSize m_PreferredLargeHeapBlockSize;
8444 VkPhysicalDevice m_PhysicalDevice;
8445 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
8446 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
8447 #if VMA_EXTERNAL_MEMORY
8448 VkExternalMemoryHandleTypeFlagsKHR m_TypeExternalMemoryHandleTypes[VK_MAX_MEMORY_TYPES];
8451 VMA_RW_MUTEX m_PoolsMutex;
8452 typedef VmaIntrusiveLinkedList<VmaPoolListItemTraits> PoolList;
8455 uint32_t m_NextPoolId;
8460 uint32_t m_GlobalMemoryTypeBits;
8462 #if VMA_RECORDING_ENABLED
8463 VmaRecorder* m_pRecorder;
8468 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
8469 void ImportVulkanFunctions_Static();
8474 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
8475 void ImportVulkanFunctions_Dynamic();
8478 void ValidateVulkanFunctions();
8480 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
8482 VkResult AllocateMemoryOfType(
8484 VkDeviceSize alignment,
8485 bool dedicatedAllocation,
8486 VkBuffer dedicatedBuffer,
8487 VkBufferUsageFlags dedicatedBufferUsage,
8488 VkImage dedicatedImage,
8490 uint32_t memTypeIndex,
8491 VmaSuballocationType suballocType,
8492 size_t allocationCount,
8496 VkResult AllocateDedicatedMemoryPage(
8498 VmaSuballocationType suballocType,
8499 uint32_t memTypeIndex,
8500 const VkMemoryAllocateInfo& allocInfo,
8502 bool isUserDataString,
8507 VkResult AllocateDedicatedMemory(
8509 VmaSuballocationType suballocType,
8510 uint32_t memTypeIndex,
8513 bool isUserDataString,
8516 VkBuffer dedicatedBuffer,
8517 VkBufferUsageFlags dedicatedBufferUsage,
8518 VkImage dedicatedImage,
8519 size_t allocationCount,
8528 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
8530 uint32_t CalculateGlobalMemoryTypeBits()
const;
8532 bool GetFlushOrInvalidateRange(
8534 VkDeviceSize offset, VkDeviceSize size,
8535 VkMappedMemoryRange& outRange)
const;
8537 #if VMA_MEMORY_BUDGET
8538 void UpdateVulkanBudget();
8545 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
8547 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
8550 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
8552 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
8555 template<
typename T>
8558 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
8561 template<
typename T>
8562 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
8564 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
8567 template<
typename T>
8568 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
8573 VmaFree(hAllocator, ptr);
8577 template<
typename T>
8578 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
8582 for(
size_t i = count; i--; )
8584 VmaFree(hAllocator, ptr);
8591 #if VMA_STATS_STRING_ENABLED
8593 class VmaStringBuilder
8596 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
8597 size_t GetLength()
const {
return m_Data.size(); }
8598 const char* GetData()
const {
return m_Data.data(); }
8600 void Add(
char ch) { m_Data.push_back(ch); }
8601 void Add(
const char* pStr);
8602 void AddNewLine() { Add(
'\n'); }
8603 void AddNumber(uint32_t num);
8604 void AddNumber(uint64_t num);
8605 void AddPointer(
const void* ptr);
8608 VmaVector< char, VmaStlAllocator<char> > m_Data;
8611 void VmaStringBuilder::Add(
const char* pStr)
8613 const size_t strLen = strlen(pStr);
8616 const size_t oldCount = m_Data.size();
8617 m_Data.resize(oldCount + strLen);
8618 memcpy(m_Data.data() + oldCount, pStr, strLen);
8622 void VmaStringBuilder::AddNumber(uint32_t num)
8629 *--p =
'0' + (num % 10);
8636 void VmaStringBuilder::AddNumber(uint64_t num)
8643 *--p =
'0' + (num % 10);
8650 void VmaStringBuilder::AddPointer(
const void* ptr)
8653 VmaPtrToStr(buf,
sizeof(buf), ptr);
8662 #if VMA_STATS_STRING_ENABLED
8666 VMA_CLASS_NO_COPY(VmaJsonWriter)
8668 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
8671 void BeginObject(
bool singleLine =
false);
8674 void BeginArray(
bool singleLine =
false);
8677 void WriteString(
const char* pStr);
8678 void BeginString(
const char* pStr = VMA_NULL);
8679 void ContinueString(
const char* pStr);
8680 void ContinueString(uint32_t n);
8681 void ContinueString(uint64_t n);
8682 void ContinueString_Pointer(
const void* ptr);
8683 void EndString(
const char* pStr = VMA_NULL);
8685 void WriteNumber(uint32_t n);
8686 void WriteNumber(uint64_t n);
8687 void WriteBool(
bool b);
8691 static const char*
const INDENT;
8693 enum COLLECTION_TYPE
8695 COLLECTION_TYPE_OBJECT,
8696 COLLECTION_TYPE_ARRAY,
8700 COLLECTION_TYPE type;
8701 uint32_t valueCount;
8702 bool singleLineMode;
8705 VmaStringBuilder& m_SB;
8706 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
8707 bool m_InsideString;
8709 void BeginValue(
bool isString);
8710 void WriteIndent(
bool oneLess =
false);
8713 const char*
const VmaJsonWriter::INDENT =
" ";
8715 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
8717 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
8718 m_InsideString(false)
8722 VmaJsonWriter::~VmaJsonWriter()
8724 VMA_ASSERT(!m_InsideString);
8725 VMA_ASSERT(m_Stack.empty());
8728 void VmaJsonWriter::BeginObject(
bool singleLine)
8730 VMA_ASSERT(!m_InsideString);
8736 item.type = COLLECTION_TYPE_OBJECT;
8737 item.valueCount = 0;
8738 item.singleLineMode = singleLine;
8739 m_Stack.push_back(item);
8742 void VmaJsonWriter::EndObject()
8744 VMA_ASSERT(!m_InsideString);
8749 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
8753 void VmaJsonWriter::BeginArray(
bool singleLine)
8755 VMA_ASSERT(!m_InsideString);
8761 item.type = COLLECTION_TYPE_ARRAY;
8762 item.valueCount = 0;
8763 item.singleLineMode = singleLine;
8764 m_Stack.push_back(item);
8767 void VmaJsonWriter::EndArray()
8769 VMA_ASSERT(!m_InsideString);
8774 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
8778 void VmaJsonWriter::WriteString(
const char* pStr)
8784 void VmaJsonWriter::BeginString(
const char* pStr)
8786 VMA_ASSERT(!m_InsideString);
8790 m_InsideString =
true;
8791 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8793 ContinueString(pStr);
8797 void VmaJsonWriter::ContinueString(
const char* pStr)
8799 VMA_ASSERT(m_InsideString);
8801 const size_t strLen = strlen(pStr);
8802 for(
size_t i = 0; i < strLen; ++i)
8835 VMA_ASSERT(0 &&
"Character not currently supported.");
8841 void VmaJsonWriter::ContinueString(uint32_t n)
8843 VMA_ASSERT(m_InsideString);
8847 void VmaJsonWriter::ContinueString(uint64_t n)
8849 VMA_ASSERT(m_InsideString);
8853 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
8855 VMA_ASSERT(m_InsideString);
8856 m_SB.AddPointer(ptr);
8859 void VmaJsonWriter::EndString(
const char* pStr)
8861 VMA_ASSERT(m_InsideString);
8862 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8864 ContinueString(pStr);
8867 m_InsideString =
false;
8870 void VmaJsonWriter::WriteNumber(uint32_t n)
8872 VMA_ASSERT(!m_InsideString);
8877 void VmaJsonWriter::WriteNumber(uint64_t n)
8879 VMA_ASSERT(!m_InsideString);
8884 void VmaJsonWriter::WriteBool(
bool b)
8886 VMA_ASSERT(!m_InsideString);
8888 m_SB.Add(b ?
"true" :
"false");
8891 void VmaJsonWriter::WriteNull()
8893 VMA_ASSERT(!m_InsideString);
8898 void VmaJsonWriter::BeginValue(
bool isString)
8900 if(!m_Stack.empty())
8902 StackItem& currItem = m_Stack.back();
8903 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8904 currItem.valueCount % 2 == 0)
8906 VMA_ASSERT(isString);
8909 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8910 currItem.valueCount % 2 != 0)
8914 else if(currItem.valueCount > 0)
8923 ++currItem.valueCount;
8927 void VmaJsonWriter::WriteIndent(
bool oneLess)
8929 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
8933 size_t count = m_Stack.size();
8934 if(count > 0 && oneLess)
8938 for(
size_t i = 0; i < count; ++i)
8949 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
8951 if(IsUserDataString())
8953 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
8955 FreeUserDataString(hAllocator);
8957 if(pUserData != VMA_NULL)
8959 m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (
const char*)pUserData);
8964 m_pUserData = pUserData;
8968 void VmaAllocation_T::ChangeBlockAllocation(
8970 VmaDeviceMemoryBlock* block,
8971 VkDeviceSize offset)
8973 VMA_ASSERT(block != VMA_NULL);
8974 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8977 if(block != m_BlockAllocation.m_Block)
8979 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
8980 if(IsPersistentMap())
8982 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
8983 block->Map(hAllocator, mapRefCount, VMA_NULL);
8986 m_BlockAllocation.m_Block = block;
8987 m_BlockAllocation.m_Offset = offset;
8990 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
8992 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8993 m_BlockAllocation.m_Offset = newOffset;
8996 VkDeviceSize VmaAllocation_T::GetOffset()
const
9000 case ALLOCATION_TYPE_BLOCK:
9001 return m_BlockAllocation.m_Offset;
9002 case ALLOCATION_TYPE_DEDICATED:
9010 VkDeviceMemory VmaAllocation_T::GetMemory()
const
9014 case ALLOCATION_TYPE_BLOCK:
9015 return m_BlockAllocation.m_Block->GetDeviceMemory();
9016 case ALLOCATION_TYPE_DEDICATED:
9017 return m_DedicatedAllocation.m_hMemory;
9020 return VK_NULL_HANDLE;
9024 void* VmaAllocation_T::GetMappedData()
const
9028 case ALLOCATION_TYPE_BLOCK:
9031 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
9032 VMA_ASSERT(pBlockData != VMA_NULL);
9033 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
9040 case ALLOCATION_TYPE_DEDICATED:
9041 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
9042 return m_DedicatedAllocation.m_pMappedData;
9049 bool VmaAllocation_T::CanBecomeLost()
const
9053 case ALLOCATION_TYPE_BLOCK:
9054 return m_BlockAllocation.m_CanBecomeLost;
9055 case ALLOCATION_TYPE_DEDICATED:
9063 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9065 VMA_ASSERT(CanBecomeLost());
9071 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
9074 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
9079 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
9085 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
9095 #if VMA_STATS_STRING_ENABLED
9098 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
9107 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const
9109 json.WriteString(
"Type");
9110 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
9112 json.WriteString(
"Size");
9113 json.WriteNumber(m_Size);
9115 if(m_pUserData != VMA_NULL)
9117 json.WriteString(
"UserData");
9118 if(IsUserDataString())
9120 json.WriteString((
const char*)m_pUserData);
9125 json.ContinueString_Pointer(m_pUserData);
9130 json.WriteString(
"CreationFrameIndex");
9131 json.WriteNumber(m_CreationFrameIndex);
9133 json.WriteString(
"LastUseFrameIndex");
9134 json.WriteNumber(GetLastUseFrameIndex());
9136 if(m_BufferImageUsage != 0)
9138 json.WriteString(
"Usage");
9139 json.WriteNumber(m_BufferImageUsage);
9145 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
9147 VMA_ASSERT(IsUserDataString());
9148 VmaFreeString(hAllocator->GetAllocationCallbacks(), (
char*)m_pUserData);
9149 m_pUserData = VMA_NULL;
9152 void VmaAllocation_T::BlockAllocMap()
9154 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
9156 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
9162 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
9166 void VmaAllocation_T::BlockAllocUnmap()
9168 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
9170 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
9176 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
9180 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
9182 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
9186 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
9188 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
9189 *ppData = m_DedicatedAllocation.m_pMappedData;
9195 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
9196 return VK_ERROR_MEMORY_MAP_FAILED;
9201 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9202 hAllocator->m_hDevice,
9203 m_DedicatedAllocation.m_hMemory,
9208 if(result == VK_SUCCESS)
9210 m_DedicatedAllocation.m_pMappedData = *ppData;
9217 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
9219 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
9221 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
9226 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
9227 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
9228 hAllocator->m_hDevice,
9229 m_DedicatedAllocation.m_hMemory);
9234 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
9238 #if VMA_STATS_STRING_ENABLED
9240 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
9244 json.WriteString(
"Blocks");
9247 json.WriteString(
"Allocations");
9250 json.WriteString(
"UnusedRanges");
9253 json.WriteString(
"UsedBytes");
9256 json.WriteString(
"UnusedBytes");
9261 json.WriteString(
"AllocationSize");
9262 json.BeginObject(
true);
9263 json.WriteString(
"Min");
9265 json.WriteString(
"Avg");
9267 json.WriteString(
"Max");
9274 json.WriteString(
"UnusedRangeSize");
9275 json.BeginObject(
true);
9276 json.WriteString(
"Min");
9278 json.WriteString(
"Avg");
9280 json.WriteString(
"Max");
9290 struct VmaSuballocationItemSizeLess
9293 const VmaSuballocationList::iterator lhs,
9294 const VmaSuballocationList::iterator rhs)
const
9296 return lhs->size < rhs->size;
9299 const VmaSuballocationList::iterator lhs,
9300 VkDeviceSize rhsSize)
const
9302 return lhs->size < rhsSize;
9310 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
9312 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
9316 #if VMA_STATS_STRING_ENABLED
9318 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
9319 VkDeviceSize unusedBytes,
9320 size_t allocationCount,
9321 size_t unusedRangeCount)
const
9325 json.WriteString(
"TotalBytes");
9326 json.WriteNumber(GetSize());
9328 json.WriteString(
"UnusedBytes");
9329 json.WriteNumber(unusedBytes);
9331 json.WriteString(
"Allocations");
9332 json.WriteNumber((uint64_t)allocationCount);
9334 json.WriteString(
"UnusedRanges");
9335 json.WriteNumber((uint64_t)unusedRangeCount);
9337 json.WriteString(
"Suballocations");
9341 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
9342 VkDeviceSize offset,
9345 json.BeginObject(
true);
9347 json.WriteString(
"Offset");
9348 json.WriteNumber(offset);
9350 hAllocation->PrintParameters(json);
9355 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
9356 VkDeviceSize offset,
9357 VkDeviceSize size)
const
9359 json.BeginObject(
true);
9361 json.WriteString(
"Offset");
9362 json.WriteNumber(offset);
9364 json.WriteString(
"Type");
9365 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
9367 json.WriteString(
"Size");
9368 json.WriteNumber(size);
9373 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const
9384 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
9385 VmaBlockMetadata(hAllocator),
9388 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9389 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
9393 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
9397 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
9399 VmaBlockMetadata::Init(size);
9402 m_SumFreeSize = size;
9404 VmaSuballocation suballoc = {};
9405 suballoc.offset = 0;
9406 suballoc.size = size;
9407 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9408 suballoc.hAllocation = VK_NULL_HANDLE;
9410 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9411 m_Suballocations.push_back(suballoc);
9412 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
9414 m_FreeSuballocationsBySize.push_back(suballocItem);
9417 bool VmaBlockMetadata_Generic::Validate()
const
9419 VMA_VALIDATE(!m_Suballocations.empty());
9422 VkDeviceSize calculatedOffset = 0;
9424 uint32_t calculatedFreeCount = 0;
9426 VkDeviceSize calculatedSumFreeSize = 0;
9429 size_t freeSuballocationsToRegister = 0;
9431 bool prevFree =
false;
9433 for(
const auto& subAlloc : m_Suballocations)
9436 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
9438 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
9440 VMA_VALIDATE(!prevFree || !currFree);
9442 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
9446 calculatedSumFreeSize += subAlloc.size;
9447 ++calculatedFreeCount;
9448 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9450 ++freeSuballocationsToRegister;
9454 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
9458 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
9459 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
9462 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
9465 calculatedOffset += subAlloc.size;
9466 prevFree = currFree;
9471 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
9473 VkDeviceSize lastSize = 0;
9474 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
9476 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
9479 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
9481 VMA_VALIDATE(suballocItem->size >= lastSize);
9483 lastSize = suballocItem->size;
9487 VMA_VALIDATE(ValidateFreeSuballocationList());
9488 VMA_VALIDATE(calculatedOffset == GetSize());
9489 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
9490 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
9495 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const
9497 if(!m_FreeSuballocationsBySize.empty())
9499 return m_FreeSuballocationsBySize.back()->size;
9507 bool VmaBlockMetadata_Generic::IsEmpty()
const
9509 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
9512 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
9516 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9528 for(
const auto& suballoc : m_Suballocations)
9530 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9543 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const
9545 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9547 inoutStats.
size += GetSize();
9554 #if VMA_STATS_STRING_ENABLED
9556 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const
9558 PrintDetailedMap_Begin(json,
9560 m_Suballocations.size() - (
size_t)m_FreeCount,
9564 for(
const auto& suballoc : m_Suballocations)
9566 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
9568 PrintDetailedMap_UnusedRange(json, suballoc.offset, suballoc.size);
9572 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9576 PrintDetailedMap_End(json);
9581 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
9582 uint32_t currentFrameIndex,
9583 uint32_t frameInUseCount,
9584 VkDeviceSize bufferImageGranularity,
9585 VkDeviceSize allocSize,
9586 VkDeviceSize allocAlignment,
9588 VmaSuballocationType allocType,
9589 bool canMakeOtherLost,
9591 VmaAllocationRequest* pAllocationRequest)
9593 VMA_ASSERT(allocSize > 0);
9594 VMA_ASSERT(!upperAddress);
9595 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9596 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9597 VMA_HEAVY_ASSERT(Validate());
9599 pAllocationRequest->type = VmaAllocationRequestType::Normal;
9602 if(canMakeOtherLost ==
false &&
9603 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
9609 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
9610 if(freeSuballocCount > 0)
9615 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
9616 m_FreeSuballocationsBySize.data(),
9617 m_FreeSuballocationsBySize.data() + freeSuballocCount,
9618 allocSize + 2 * VMA_DEBUG_MARGIN,
9619 VmaSuballocationItemSizeLess());
9620 size_t index = it - m_FreeSuballocationsBySize.data();
9621 for(; index < freeSuballocCount; ++index)
9626 bufferImageGranularity,
9630 m_FreeSuballocationsBySize[index],
9632 &pAllocationRequest->offset,
9633 &pAllocationRequest->itemsToMakeLostCount,
9634 &pAllocationRequest->sumFreeSize,
9635 &pAllocationRequest->sumItemSize))
9637 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
9642 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
9644 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9645 it != m_Suballocations.end();
9648 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
9651 bufferImageGranularity,
9657 &pAllocationRequest->offset,
9658 &pAllocationRequest->itemsToMakeLostCount,
9659 &pAllocationRequest->sumFreeSize,
9660 &pAllocationRequest->sumItemSize))
9662 pAllocationRequest->item = it;
9670 for(
size_t index = freeSuballocCount; index--; )
9675 bufferImageGranularity,
9679 m_FreeSuballocationsBySize[index],
9681 &pAllocationRequest->offset,
9682 &pAllocationRequest->itemsToMakeLostCount,
9683 &pAllocationRequest->sumFreeSize,
9684 &pAllocationRequest->sumItemSize))
9686 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
9693 if(canMakeOtherLost)
9698 VmaAllocationRequest tmpAllocRequest = {};
9699 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
9700 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
9701 suballocIt != m_Suballocations.end();
9704 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
9705 suballocIt->hAllocation->CanBecomeLost())
9710 bufferImageGranularity,
9716 &tmpAllocRequest.offset,
9717 &tmpAllocRequest.itemsToMakeLostCount,
9718 &tmpAllocRequest.sumFreeSize,
9719 &tmpAllocRequest.sumItemSize))
9723 *pAllocationRequest = tmpAllocRequest;
9724 pAllocationRequest->item = suballocIt;
9727 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
9729 *pAllocationRequest = tmpAllocRequest;
9730 pAllocationRequest->item = suballocIt;
9743 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
9744 uint32_t currentFrameIndex,
9745 uint32_t frameInUseCount,
9746 VmaAllocationRequest* pAllocationRequest)
9748 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
9750 while(pAllocationRequest->itemsToMakeLostCount > 0)
9752 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
9754 ++pAllocationRequest->item;
9756 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9757 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
9758 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
9759 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9761 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
9762 --pAllocationRequest->itemsToMakeLostCount;
9770 VMA_HEAVY_ASSERT(Validate());
9771 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9772 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
9777 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9779 uint32_t lostAllocationCount = 0;
9780 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9781 it != m_Suballocations.end();
9784 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
9785 it->hAllocation->CanBecomeLost() &&
9786 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9788 it = FreeSuballocation(it);
9789 ++lostAllocationCount;
9792 return lostAllocationCount;
9795 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
9797 for(
auto& suballoc : m_Suballocations)
9799 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9801 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9803 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9804 return VK_ERROR_VALIDATION_FAILED_EXT;
9806 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9808 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9809 return VK_ERROR_VALIDATION_FAILED_EXT;
9817 void VmaBlockMetadata_Generic::Alloc(
9818 const VmaAllocationRequest& request,
9819 VmaSuballocationType type,
9820 VkDeviceSize allocSize,
9823 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
9824 VMA_ASSERT(request.item != m_Suballocations.end());
9825 VmaSuballocation& suballoc = *request.item;
9827 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9829 VMA_ASSERT(request.offset >= suballoc.offset);
9830 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
9831 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
9832 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
9836 UnregisterFreeSuballocation(request.item);
9838 suballoc.offset = request.offset;
9839 suballoc.size = allocSize;
9840 suballoc.type = type;
9841 suballoc.hAllocation = hAllocation;
9846 VmaSuballocation paddingSuballoc = {};
9847 paddingSuballoc.offset = request.offset + allocSize;
9848 paddingSuballoc.size = paddingEnd;
9849 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9850 VmaSuballocationList::iterator next = request.item;
9852 const VmaSuballocationList::iterator paddingEndItem =
9853 m_Suballocations.insert(next, paddingSuballoc);
9854 RegisterFreeSuballocation(paddingEndItem);
9860 VmaSuballocation paddingSuballoc = {};
9861 paddingSuballoc.offset = request.offset - paddingBegin;
9862 paddingSuballoc.size = paddingBegin;
9863 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9864 const VmaSuballocationList::iterator paddingBeginItem =
9865 m_Suballocations.insert(request.item, paddingSuballoc);
9866 RegisterFreeSuballocation(paddingBeginItem);
9870 m_FreeCount = m_FreeCount - 1;
9871 if(paddingBegin > 0)
9879 m_SumFreeSize -= allocSize;
// Frees the suballocation owning the given allocation handle by linear search
// over the suballocation list. Asserts (debug builds) if the handle is not
// found in this block.
9882 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
9884 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9885 suballocItem != m_Suballocations.end();
9888 VmaSuballocation& suballoc = *suballocItem;
9889 if(suballoc.hAllocation == allocation)
9891 FreeSuballocation(suballocItem);
// Expensive full-metadata validation, compiled in only for heavy-debug builds.
9892 VMA_HEAVY_ASSERT(Validate());
// Reached only when no suballocation matched (early return above is elided
// in this extraction).
9896 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts at exactly `offset`, found by linear
// search. Asserts (debug builds) if no suballocation has that offset.
9899 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
9901 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9902 suballocItem != m_Suballocations.end();
9905 VmaSuballocation& suballoc = *suballocItem;
9906 if(suballoc.offset == offset)
9908 FreeSuballocation(suballocItem);
9912 VMA_ASSERT(0 &&
"Not found!");
// Debug check of m_FreeSuballocationsBySize invariants: every entry is FREE,
// at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the vector
// is sorted by ascending size. (The trailing `return true;` is elided in
// this extraction.)
9915 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const
9917 VkDeviceSize lastSize = 0;
9918 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
9920 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
9922 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
9923 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
// Ascending-size order is what makes binary search in Unregister valid.
9924 VMA_VALIDATE(it->size >= lastSize);
9925 lastSize = it->size;
// Tests whether an allocation of (allocSize, allocAlignment, allocType) can be
// placed starting at suballocItem, writing the resulting aligned *pOffset and,
// in the canMakeOtherLost path, how many existing allocations would have to be
// made "lost" (*itemsToMakeLostCount) plus free/item byte sums. Returns
// true/false via returns elided in this extraction. Two symmetric code paths:
// one that may consume subsequent non-free items (canMakeOtherLost), one that
// only uses a single FREE item.
9930 bool VmaBlockMetadata_Generic::CheckAllocation(
9931 uint32_t currentFrameIndex,
9932 uint32_t frameInUseCount,
9933 VkDeviceSize bufferImageGranularity,
9934 VkDeviceSize allocSize,
9935 VkDeviceSize allocAlignment,
9936 VmaSuballocationType allocType,
9937 VmaSuballocationList::const_iterator suballocItem,
9938 bool canMakeOtherLost,
9939 VkDeviceSize* pOffset,
9940 size_t* itemsToMakeLostCount,
9941 VkDeviceSize* pSumFreeSize,
9942 VkDeviceSize* pSumItemSize)
const
9944 VMA_ASSERT(allocSize > 0);
9945 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9946 VMA_ASSERT(suballocItem != m_Suballocations.cend());
9947 VMA_ASSERT(pOffset != VMA_NULL);
9949 *itemsToMakeLostCount = 0;
// ---- Path 1: allowed to steal space from lost-able allocations. ----
9953 if(canMakeOtherLost)
9955 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9957 *pSumFreeSize = suballocItem->size;
// Starting item is occupied: usable only if its allocation can become lost
// and has not been used within the last frameInUseCount frames.
9961 if(suballocItem->hAllocation->CanBecomeLost() &&
9962 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9964 ++*itemsToMakeLostCount;
9965 *pSumItemSize = suballocItem->size;
// Early out: not enough room between this item's start and the block end.
9974 if(GetSize() - suballocItem->offset < allocSize)
// Tentative offset; bump by debug margin, then align up.
9980 *pOffset = suballocItem->offset;
9983 if(VMA_DEBUG_MARGIN > 0)
9985 *pOffset += VMA_DEBUG_MARGIN;
9989 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// bufferImageGranularity: if a preceding suballocation of conflicting type
// shares the same "page", push the offset up to the granularity boundary.
9993 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
9995 bool bufferImageGranularityConflict =
false;
9996 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
// NOTE(review): the matching `--prevSuballocItem;` inside this loop (cf.
// content line 10142 in the equivalent loop below) is not visible here —
// presumably elided by the extraction; confirm against the full source.
9997 while(prevSuballocItem != m_Suballocations.cbegin())
10000 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
10001 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
10003 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10005 bufferImageGranularityConflict =
true;
10013 if(bufferImageGranularityConflict)
10015 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)/* keep as-is */;
10021 if(*pOffset >= suballocItem->offset + suballocItem->size)
// paddingBegin = alignment waste before the allocation; the end margin is
// the configured debug margin.
10027 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
10030 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
10032 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
10034 if(suballocItem->offset + totalSize > GetSize())
// If one item is not enough, walk forward accumulating free items and
// lost-able allocations until totalSize is covered or we fail.
10041 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
10042 if(totalSize > suballocItem->size)
10044 VkDeviceSize remainingSize = totalSize - suballocItem->size;
10045 while(remainingSize > 0)
10047 ++lastSuballocItem;
10048 if(lastSuballocItem == m_Suballocations.cend())
10052 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
10054 *pSumFreeSize += lastSuballocItem->size;
10058 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
10059 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
10060 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10062 ++*itemsToMakeLostCount;
10063 *pSumItemSize += lastSuballocItem->size;
// Saturating subtraction: avoid unsigned underflow when the item is
// larger than what remains.
10070 remainingSize = (lastSuballocItem->size < remainingSize) ?
10071 remainingSize - lastSuballocItem->size : 0;
// Check granularity conflicts against *following* suballocations too; a
// conflicting follower that can become lost is added to the lost count.
10077 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
10079 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
10080 ++nextSuballocItem;
10081 while(nextSuballocItem != m_Suballocations.cend())
10083 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
10084 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10086 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10088 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
10089 if(nextSuballoc.hAllocation->CanBecomeLost() &&
10090 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10092 ++*itemsToMakeLostCount;
10105 ++nextSuballocItem;
// ---- Path 2: plain placement inside a single FREE suballocation. ----
10111 const VmaSuballocation& suballoc = *suballocItem;
10112 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10114 *pSumFreeSize = suballoc.size;
// Early out: item too small even before alignment/margins.
10117 if(suballoc.size < allocSize)
10123 *pOffset = suballoc.offset;
10126 if(VMA_DEBUG_MARGIN > 0)
10128 *pOffset += VMA_DEBUG_MARGIN;
10132 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same backward granularity scan as in Path 1 (decrement visible here).
10136 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
10138 bool bufferImageGranularityConflict =
false;
10139 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
10140 while(prevSuballocItem != m_Suballocations.cbegin())
10142 --prevSuballocItem;
10143 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
10144 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
10146 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10148 bufferImageGranularityConflict =
true;
10156 if(bufferImageGranularityConflict)
10158 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)/* keep as-is */;
// Fail if the aligned allocation plus margins does not fit in this item.
10163 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
10166 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
10169 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan: here a conflicting follower is fatal (no
// lost-allocation fallback in this path).
10176 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
10178 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
10179 ++nextSuballocItem;
10180 while(nextSuballocItem != m_Suballocations.cend())
10182 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
10183 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10185 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10195 ++nextSuballocItem;
// Merges a FREE suballocation with the FREE suballocation immediately after
// it: grows `item` by the neighbor's size and erases the neighbor. Caller
// must guarantee both items exist and are FREE. (The `++nextItem;` between
// content lines 10209 and 10211 is elided in this extraction.)
10204 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
10206 VMA_ASSERT(item != m_Suballocations.end());
10207 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10209 VmaSuballocationList::iterator nextItem = item;
10211 VMA_ASSERT(nextItem != m_Suballocations.end());
10212 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
10214 item->size += nextItem->size;
// m_SumFreeSize is unchanged (both were already free); the elided line
// between these presumably decrements m_FreeCount — confirm in full source.
10216 m_Suballocations.erase(nextItem);
// Converts an occupied suballocation to FREE, coalesces it with free
// neighbors on either side, and (re)registers the resulting free item in the
// by-size vector. Returns an iterator to the final merged free item.
10219 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
10222 VmaSuballocation& suballoc = *suballocItem;
10223 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10224 suballoc.hAllocation = VK_NULL_HANDLE;
// Freed bytes return to the free pool (free-count increment is elided here).
10228 m_SumFreeSize += suballoc.size;
// Decide which neighbors (if any) are free and should be merged in.
10231 bool mergeWithNext =
false;
10232 bool mergeWithPrev =
false;
10234 VmaSuballocationList::iterator nextItem = suballocItem;
10236 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
10238 mergeWithNext =
true;
10241 VmaSuballocationList::iterator prevItem = suballocItem;
10242 if(suballocItem != m_Suballocations.begin())
10245 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
10247 mergeWithPrev =
true;
// Neighbors must be unregistered from the by-size vector before their sizes
// change, then the surviving item is re-registered once at its final size.
10253 UnregisterFreeSuballocation(nextItem);
10254 MergeFreeWithNext(suballocItem);
10259 UnregisterFreeSuballocation(prevItem);
10260 MergeFreeWithNext(prevItem);
10261 RegisterFreeSuballocation(prevItem);
// No merge with previous: register the freed item itself.
10266 RegisterFreeSuballocation(suballocItem);
10267 return suballocItem;
// Inserts a FREE suballocation into m_FreeSuballocationsBySize, keeping the
// vector sorted by ascending size. Items smaller than the registration
// threshold are intentionally not tracked (they are found by list walk only).
10271 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
10273 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10274 VMA_ASSERT(item->size > 0);
// Heavy-debug: verify sortedness before mutating.
10278 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
10280 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
10282 if(m_FreeSuballocationsBySize.empty())
10284 m_FreeSuballocationsBySize.push_back(item);
// Non-empty: binary-search insertion preserving size order.
10288 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from m_FreeSuballocationsBySize. Binary-search
// jumps to the first entry with size >= item->size, then scans the run of
// equal-sized entries linearly for the exact iterator. Asserts if absent.
10296 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
10298 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10299 VMA_ASSERT(item->size > 0);
10303 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Items below the threshold were never registered, so nothing to remove.
10305 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
10307 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
10308 m_FreeSuballocationsBySize.data(),
10309 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
10311 VmaSuballocationItemSizeLess());
10312 for(
size_t index = it - m_FreeSuballocationsBySize.data();
10313 index < m_FreeSuballocationsBySize.size();
10316 if(m_FreeSuballocationsBySize[index] == item)
10318 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the run of entries with the same size — keep scanning;
// if the size no longer matches, the item was never in the vector.
10321 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
10323 VMA_ASSERT(0 &&
"Not found.");
// Heuristic used by defragmentation: returns whether moving allocations in
// this block could create bufferImageGranularity conflicts. True if a
// buffer/image type conflict already exists between consecutive allocations,
// or if every allocation's alignment is >= the granularity (making conflicts
// impossible to introduce). inOutPrevSuballocType carries the last seen type
// across blocks. (Early `return true;` for the trivial cases is elided.)
10329 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
10330 VkDeviceSize bufferImageGranularity,
10331 VmaSuballocationType& inOutPrevSuballocType)
const
10333 if(bufferImageGranularity == 1 || IsEmpty())
10338 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
10339 bool typeConflictFound =
false;
10340 for(
const auto& suballoc : m_Suballocations)
10342 const VmaSuballocationType suballocType = suballoc.type;
10343 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
10345 minAlignment = VMA_MIN(minAlignment, suballoc.hAllocation->GetAlignment());
10346 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
10348 typeConflictFound =
true;
10350 inOutPrevSuballocType = suballocType;
10354 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Linear (ring-buffer / double-stack) block metadata. Two suballocation
// vectors are swapped by m_1stVectorIndex; the 2nd vector's role is given by
// m_2ndVectorMode (empty, ring buffer, or upper stack of a double stack).
// Null-item counters track freed-but-not-yet-compacted entries.
10360 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
10361 VmaBlockMetadata(hAllocator),
10363 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10364 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10365 m_1stVectorIndex(0),
10366 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
10367 m_1stNullItemsBeginCount(0),
10368 m_1stNullItemsMiddleCount(0),
10369 m_2ndNullItemsCount(0)
// Trivial destructor — member vectors release their storage themselves.
10373 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Records the block size in the base class and marks the whole block free.
10377 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
10379 VmaBlockMetadata::Init(size);
10380 m_SumFreeSize = size;
// Full consistency check of the linear metadata: vector-mode invariants, null
// item counters, monotonically increasing offsets with debug margins, and
// m_SumFreeSize == block size minus the sum of live allocation sizes.
// Returns via VMA_VALIDATE (each failed check returns false; final
// `return true;` elided in this extraction).
10383 bool VmaBlockMetadata_Linear::Validate()
const
10385 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10386 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is non-empty iff a 2nd-vector mode is active; ring-buffer mode
// requires a non-empty 1st vector.
10388 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
10389 VMA_VALIDATE(!suballocations1st.empty() ||
10390 suballocations2nd.empty() ||
10391 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
10393 if(!suballocations1st.empty())
// First non-null and last items of the 1st vector must be live allocations.
10396 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
10398 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
10400 if(!suballocations2nd.empty())
10403 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
10406 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
10407 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
10409 VkDeviceSize sumUsedSize = 0;
10410 const size_t suballoc1stCount = suballocations1st.size();
// `offset` tracks the minimum legal start of the next suballocation.
10411 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring buffer: 2nd vector occupies the low end of the block, walk it first.
10413 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10415 const size_t suballoc2ndCount = suballocations2nd.size();
10416 size_t nullItem2ndCount = 0;
10417 for(
size_t i = 0; i < suballoc2ndCount; ++i)
10419 const VmaSuballocation& suballoc = suballocations2nd[i];
10420 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// FREE type and null handle must coincide exactly.
10422 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10423 VMA_VALIDATE(suballoc.offset >= offset);
10427 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10428 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10429 sumUsedSize += suballoc.size;
10433 ++nullItem2ndCount;
10436 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10439 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items in the 1st vector must be genuinely empty slots.
10442 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
10444 const VmaSuballocation& suballoc = suballocations1st[i];
10445 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
10446 suballoc.hAllocation == VK_NULL_HANDLE);
10449 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Walk the live portion of the 1st vector.
10451 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
10453 const VmaSuballocation& suballoc = suballocations1st[i];
10454 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10456 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10457 VMA_VALIDATE(suballoc.offset >= offset);
10458 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
10462 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10463 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10464 sumUsedSize += suballoc.size;
10468 ++nullItem1stCount;
10471 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10473 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double stack: 2nd vector grows downward from the top — iterate reversed
// so offsets are still visited in increasing order.
10475 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10477 const size_t suballoc2ndCount = suballocations2nd.size();
10478 size_t nullItem2ndCount = 0;
10479 for(
size_t i = suballoc2ndCount; i--; )
10481 const VmaSuballocation& suballoc = suballocations2nd[i];
10482 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10484 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10485 VMA_VALIDATE(suballoc.offset >= offset);
10489 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10490 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10491 sumUsedSize += suballoc.size;
10495 ++nullItem2ndCount;
10498 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10501 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
10504 VMA_VALIDATE(offset <= GetSize());
10505 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Live allocation count = total entries in both vectors minus null (freed
// placeholder) items tracked by the three counters.
10510 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const
10512 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
10513 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Largest contiguous free range, computed per 2nd-vector mode from the gap
// positions that the linear layout allows (before the 1st vector, after it,
// or between the stacks). Early-return cases and the VMA_MAX combination are
// partially elided in this extraction.
10516 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const
10518 const VkDeviceSize size = GetSize();
10530 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10532 switch(m_2ndVectorMode)
10534 case SECOND_VECTOR_EMPTY:
// Free space is before the first allocation and after the last one.
10540 const size_t suballocations1stCount = suballocations1st.size();
10541 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
10542 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10543 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
10545 firstSuballoc.offset,
10546 size - (lastSuballoc.offset + lastSuballoc.size));
// Ring buffer: the only gap is between the end of the 2nd vector (low end)
// and the start of the 1st vector.
10550 case SECOND_VECTOR_RING_BUFFER:
10555 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10556 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
10557 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
10558 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// Double stack: the gap is between the top of the lower (1st) stack and the
// bottom of the upper (2nd) stack.
10562 case SECOND_VECTOR_DOUBLE_STACK:
10567 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10568 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
10569 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
10570 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Accumulates per-block statistics (allocation count, unused ranges, sizes)
// into outInfo by walking the block in address order: first the 2nd vector in
// ring-buffer mode (low addresses), then the 1st vector, then the 2nd vector
// in double-stack mode (high addresses). `lastOffset` is the cursor; gaps
// between it and the next live allocation are unused ranges. The actual
// outInfo field updates are elided in this extraction.
10580 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
10582 const VkDeviceSize size = GetSize();
10583 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10584 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10585 const size_t suballoc1stCount = suballocations1st.size();
10586 const size_t suballoc2ndCount = suballocations2nd.size();
10597 VkDeviceSize lastOffset = 0;
// --- Phase 1: ring-buffer 2nd vector, addresses [0, start of 1st vector).
10599 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10601 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10602 size_t nextAlloc2ndIndex = 0;
10603 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) entries.
10606 while(nextAlloc2ndIndex < suballoc2ndCount &&
10607 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10609 ++nextAlloc2ndIndex;
10613 if(nextAlloc2ndIndex < suballoc2ndCount)
10615 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
10618 if(lastOffset < suballoc.offset)
10621 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10635 lastOffset = suballoc.offset + suballoc.size;
10636 ++nextAlloc2ndIndex;
// No more live 2nd-vector items: trailing gap up to the 1st vector start.
10642 if(lastOffset < freeSpace2ndTo1stEnd)
10644 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10652 lastOffset = freeSpace2ndTo1stEnd;
// --- Phase 2: 1st vector, up to the bottom of the upper stack (or block end).
10657 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10658 const VkDeviceSize freeSpace1stTo2ndEnd =
10659 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10660 while(lastOffset < freeSpace1stTo2ndEnd)
10663 while(nextAlloc1stIndex < suballoc1stCount &&
10664 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10666 ++nextAlloc1stIndex;
10670 if(nextAlloc1stIndex < suballoc1stCount)
10672 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10675 if(lastOffset < suballoc.offset)
10678 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10692 lastOffset = suballoc.offset + suballoc.size;
10693 ++nextAlloc1stIndex;
10699 if(lastOffset < freeSpace1stTo2ndEnd)
10701 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10709 lastOffset = freeSpace1stTo2ndEnd;
// --- Phase 3: double-stack 2nd vector, walked top-down by index (= bottom-up
// by address) from the last element to SIZE_MAX wraparound sentinel.
10713 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10715 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10716 while(lastOffset < size)
10719 while(nextAlloc2ndIndex != SIZE_MAX &&
10720 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10722 --nextAlloc2ndIndex;
10726 if(nextAlloc2ndIndex != SIZE_MAX)
10728 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10731 if(lastOffset < suballoc.offset)
10734 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10748 lastOffset = suballoc.offset + suballoc.size;
10749 --nextAlloc2ndIndex;
10755 if(lastOffset < size)
10757 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Adds this block's contribution to pool-level statistics, using the same
// three-phase address-order walk as CalcAllocationStatInfo (ring-buffer 2nd
// vector, then 1st vector, then double-stack 2nd vector). The inoutStats
// field updates are elided in this extraction.
10773 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const
10775 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10776 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10777 const VkDeviceSize size = GetSize();
10778 const size_t suballoc1stCount = suballocations1st.size();
10779 const size_t suballoc2ndCount = suballocations2nd.size();
10781 inoutStats.
size += size;
10783 VkDeviceSize lastOffset = 0;
10785 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10787 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): the sibling walks (CalcAllocationStatInfo, PrintDetailedMap)
// start this same 2nd-vector scan at index 0; starting at
// m_1stNullItemsBeginCount — a 1st-vector counter — looks inconsistent and
// could skip leading 2nd-vector allocations. Verify against upstream VMA.
10788 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
10789 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) entries.
10792 while(nextAlloc2ndIndex < suballoc2ndCount &&
10793 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10795 ++nextAlloc2ndIndex;
10799 if(nextAlloc2ndIndex < suballoc2ndCount)
10801 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation is an unused range.
10804 if(lastOffset < suballoc.offset)
10807 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10818 lastOffset = suballoc.offset + suballoc.size;
10819 ++nextAlloc2ndIndex;
10824 if(lastOffset < freeSpace2ndTo1stEnd)
10827 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10834 lastOffset = freeSpace2ndTo1stEnd;
// Phase 2: 1st vector up to the upper stack bottom (or block end).
10839 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10840 const VkDeviceSize freeSpace1stTo2ndEnd =
10841 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10842 while(lastOffset < freeSpace1stTo2ndEnd)
10845 while(nextAlloc1stIndex < suballoc1stCount &&
10846 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10848 ++nextAlloc1stIndex;
10852 if(nextAlloc1stIndex < suballoc1stCount)
10854 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10857 if(lastOffset < suballoc.offset)
10860 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10871 lastOffset = suballoc.offset + suballoc.size;
10872 ++nextAlloc1stIndex;
10877 if(lastOffset < freeSpace1stTo2ndEnd)
10880 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10887 lastOffset = freeSpace1stTo2ndEnd;
// Phase 3: double-stack 2nd vector, reverse index order = ascending address.
10891 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10893 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10894 while(lastOffset < size)
10897 while(nextAlloc2ndIndex != SIZE_MAX &&
10898 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10900 --nextAlloc2ndIndex;
10904 if(nextAlloc2ndIndex != SIZE_MAX)
10906 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10909 if(lastOffset < suballoc.offset)
10912 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10923 lastOffset = suballoc.offset + suballoc.size;
10924 --nextAlloc2ndIndex;
10929 if(lastOffset < size)
10932 const VkDeviceSize unusedRangeSize = size - lastOffset;
10945 #if VMA_STATS_STRING_ENABLED
// Emits a detailed JSON map of this block. Two passes over the same
// three-phase address-order walk: pass 1 only counts allocations/unused
// ranges and sums used bytes (needed up-front by PrintDetailedMap_Begin);
// pass 2 re-walks and emits each allocation and unused range.
10946 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const
10948 const VkDeviceSize size = GetSize();
10949 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10950 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10951 const size_t suballoc1stCount = suballocations1st.size();
10952 const size_t suballoc2ndCount = suballocations2nd.size();
// ---- Pass 1: counting only. ----
10956 size_t unusedRangeCount = 0;
10957 VkDeviceSize usedBytes = 0;
10959 VkDeviceSize lastOffset = 0;
10961 size_t alloc2ndCount = 0;
10962 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10964 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10965 size_t nextAlloc2ndIndex = 0;
10966 while(lastOffset < freeSpace2ndTo1stEnd)
10969 while(nextAlloc2ndIndex < suballoc2ndCount &&
10970 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10972 ++nextAlloc2ndIndex;
10976 if(nextAlloc2ndIndex < suballoc2ndCount)
10978 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10981 if(lastOffset < suballoc.offset)
10984 ++unusedRangeCount;
10990 usedBytes += suballoc.size;
10993 lastOffset = suballoc.offset + suballoc.size;
10994 ++nextAlloc2ndIndex;
10999 if(lastOffset < freeSpace2ndTo1stEnd)
11002 ++unusedRangeCount;
11006 lastOffset = freeSpace2ndTo1stEnd;
11011 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
11012 size_t alloc1stCount = 0;
11013 const VkDeviceSize freeSpace1stTo2ndEnd =
11014 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
11015 while(lastOffset < freeSpace1stTo2ndEnd)
11018 while(nextAlloc1stIndex < suballoc1stCount &&
11019 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
11021 ++nextAlloc1stIndex;
11025 if(nextAlloc1stIndex < suballoc1stCount)
11027 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
11030 if(lastOffset < suballoc.offset)
11033 ++unusedRangeCount;
11039 usedBytes += suballoc.size;
11042 lastOffset = suballoc.offset + suballoc.size;
11043 ++nextAlloc1stIndex;
// End-of-1st-vector gap. The `< size` test is equivalent to
// `< freeSpace1stTo2ndEnd` here: inside this loop lastOffset is already
// below freeSpace1stTo2ndEnd, which is <= size.
11048 if(lastOffset < size)
11051 ++unusedRangeCount;
11055 lastOffset = freeSpace1stTo2ndEnd;
11059 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11061 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
11062 while(lastOffset < size)
11065 while(nextAlloc2ndIndex != SIZE_MAX &&
11066 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11068 --nextAlloc2ndIndex;
11072 if(nextAlloc2ndIndex != SIZE_MAX)
11074 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11077 if(lastOffset < suballoc.offset)
11080 ++unusedRangeCount;
11086 usedBytes += suballoc.size;
11089 lastOffset = suballoc.offset + suballoc.size;
11090 --nextAlloc2ndIndex;
11095 if(lastOffset < size)
11098 ++unusedRangeCount;
// ---- Pass 2: emit JSON. Counts from pass 1 go into the header. ----
11107 const VkDeviceSize unusedBytes = size - usedBytes;
11108 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// (lastOffset reset to 0 is elided in this extraction.)
11113 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11115 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
11116 size_t nextAlloc2ndIndex = 0;
11117 while(lastOffset < freeSpace2ndTo1stEnd)
11120 while(nextAlloc2ndIndex < suballoc2ndCount &&
11121 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11123 ++nextAlloc2ndIndex;
11127 if(nextAlloc2ndIndex < suballoc2ndCount)
11129 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11132 if(lastOffset < suballoc.offset)
11135 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11136 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11141 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11144 lastOffset = suballoc.offset + suballoc.size;
11145 ++nextAlloc2ndIndex;
11150 if(lastOffset < freeSpace2ndTo1stEnd)
11153 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
11154 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11158 lastOffset = freeSpace2ndTo1stEnd;
11163 nextAlloc1stIndex = m_1stNullItemsBeginCount;
11164 while(lastOffset < freeSpace1stTo2ndEnd)
11167 while(nextAlloc1stIndex < suballoc1stCount &&
11168 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
11170 ++nextAlloc1stIndex;
11174 if(nextAlloc1stIndex < suballoc1stCount)
11176 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
11179 if(lastOffset < suballoc.offset)
11182 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11183 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11188 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11191 lastOffset = suballoc.offset + suballoc.size;
11192 ++nextAlloc1stIndex;
11197 if(lastOffset < freeSpace1stTo2ndEnd)
11200 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
11201 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11205 lastOffset = freeSpace1stTo2ndEnd;
11209 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11211 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
11212 while(lastOffset < size)
11215 while(nextAlloc2ndIndex != SIZE_MAX &&
11216 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11218 --nextAlloc2ndIndex;
11222 if(nextAlloc2ndIndex != SIZE_MAX)
11224 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11227 if(lastOffset < suballoc.offset)
11230 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11231 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11236 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11239 lastOffset = suballoc.offset + suballoc.size;
11240 --nextAlloc2ndIndex;
11245 if(lastOffset < size)
11248 const VkDeviceSize unusedRangeSize = size - lastOffset;
11249 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11258 PrintDetailedMap_End(json);
// Entry point for placing an allocation in a linear block: validates inputs,
// then dispatches to the upper-address (double-stack top) or lower-address
// (1st-vector / ring-buffer) placement routine. (The `upperAddress` and
// `strategy` parameter declarations are elided in this extraction but are
// referenced below.)
11262 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
11263 uint32_t currentFrameIndex,
11264 uint32_t frameInUseCount,
11265 VkDeviceSize bufferImageGranularity,
11266 VkDeviceSize allocSize,
11267 VkDeviceSize allocAlignment,
11269 VmaSuballocationType allocType,
11270 bool canMakeOtherLost,
11272 VmaAllocationRequest* pAllocationRequest)
11274 VMA_ASSERT(allocSize > 0);
11275 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
11276 VMA_ASSERT(pAllocationRequest != VMA_NULL);
11277 VMA_HEAVY_ASSERT(Validate());
11278 return upperAddress ?
11279 CreateAllocationRequest_UpperAddress(
11280 currentFrameIndex, frameInUseCount, bufferImageGranularity,
11281 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
11282 CreateAllocationRequest_LowerAddress(
11283 currentFrameIndex, frameInUseCount, bufferImageGranularity,
11284 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the top of the block (double-stack upper
// stack, which grows downward). Computes a candidate offset below the current
// top of the 2nd vector, aligns it DOWN, resolves granularity conflicts
// against 2nd-vector neighbors, and accepts only if it does not collide with
// the end of the 1st vector. Returns true/false via elided return statements.
11287 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
11288 uint32_t currentFrameIndex,
11289 uint32_t frameInUseCount,
11290 VkDeviceSize bufferImageGranularity,
11291 VkDeviceSize allocSize,
11292 VkDeviceSize allocAlignment,
11293 VmaSuballocationType allocType,
11294 bool canMakeOtherLost,
11296 VmaAllocationRequest* pAllocationRequest)
11298 const VkDeviceSize size = GetSize();
11299 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11300 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address allocation is only valid in double-stack usage; the two
// modes are mutually exclusive per block.
11302 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11304 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
11309 if(allocSize > size)
// Candidate: just below the current bottom of the upper stack (or the
// block end if the upper stack is empty). Fail on unsigned underflow risk.
11313 VkDeviceSize resultBaseOffset = size - allocSize;
11314 if(!suballocations2nd.empty())
11316 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11317 resultBaseOffset = lastSuballoc.offset - allocSize;
11318 if(allocSize > lastSuballoc.offset)
11325 VkDeviceSize resultOffset = resultBaseOffset;
// Debug margin and alignment are applied downward (top-down stack).
11328 if(VMA_DEBUG_MARGIN > 0)
11330 if(resultOffset < VMA_DEBUG_MARGIN)
11334 resultOffset -= VMA_DEBUG_MARGIN;
11338 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Granularity check against the upper stack (allocations above us): if a
// conflicting neighbor shares the page, push the offset down to a boundary.
11342 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
11344 bool bufferImageGranularityConflict =
false;
11345 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11347 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11348 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11350 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
11352 bufferImageGranularityConflict =
true;
11360 if(bufferImageGranularityConflict)
11362 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Accept only if there is room (plus margin) above the end of the 1st
// vector (the lower stack).
11367 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
11368 suballocations1st.back().offset + suballocations1st.back().size :
11370 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also verify granularity against the top of the lower stack.
11374 if(bufferImageGranularity > 1)
11376 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11378 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11379 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11381 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request. sumFreeSize is the free gap consumed.
11395 pAllocationRequest->offset = resultOffset;
11396 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
11397 pAllocationRequest->sumItemSize = 0;
11399 pAllocationRequest->itemsToMakeLostCount = 0;
11400 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Builds an allocation request that grows the linear block from the lower
// address side. Two visible paths: (1) append at the end of the 1st suballocation
// vector when the 2nd vector is empty or used as a double stack; (2) append at
// the end of the 2nd vector (ring-buffer / empty mode), optionally making
// existing allocations "lost" to free up room.
// NOTE(review): this chunk is a lossy extraction — braces, early returns and
// `return true/false` lines are elided between the numbered lines; verify any
// edit against upstream vk_mem_alloc.h.
11407 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
11408 uint32_t currentFrameIndex,
11409 uint32_t frameInUseCount,
11410 VkDeviceSize bufferImageGranularity,
11411 VkDeviceSize allocSize,
11412 VkDeviceSize allocAlignment,
11413 VmaSuballocationType allocType,
11414 bool canMakeOtherLost,
11416 VmaAllocationRequest* pAllocationRequest)
11418 const VkDeviceSize size = GetSize();
11419 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11420 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// --- Path 1: allocate at the end of the 1st vector. ---
11422 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
// Start right after the last existing suballocation (or at offset 0).
11426 VkDeviceSize resultBaseOffset = 0;
11427 if(!suballocations1st.empty())
11429 const VmaSuballocation& lastSuballoc = suballocations1st.back();
11430 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11434 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin, then alignment.
11437 if(VMA_DEBUG_MARGIN > 0)
11439 resultOffset += VMA_DEBUG_MARGIN;
11443 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Bump alignment up to bufferImageGranularity if a previous suballocation on
// the same "page" has a conflicting type (buffer vs. image).
11447 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty())
11449 bool bufferImageGranularityConflict =
false;
11450 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11452 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11453 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11455 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11457 bufferImageGranularityConflict =
true;
11465 if(bufferImageGranularityConflict)
11467 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends where the double stack's top begins, or at block size.
11471 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
11472 suballocations2nd.back().offset : size;
11475 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Check granularity conflicts against the 2nd vector (double-stack top).
11479 if((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11481 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11483 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11484 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11486 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success on path 1: fill out the request.
11500 pAllocationRequest->offset = resultOffset;
11501 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
11502 pAllocationRequest->sumItemSize = 0;
11504 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
11505 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Path 2: wrap around — allocate at the end of the 2nd vector (ring buffer). ---
11512 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11514 VMA_ASSERT(!suballocations1st.empty());
11516 VkDeviceSize resultBaseOffset = 0;
11517 if(!suballocations2nd.empty())
11519 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11520 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11524 VkDeviceSize resultOffset = resultBaseOffset;
11527 if(VMA_DEBUG_MARGIN > 0)
11529 resultOffset += VMA_DEBUG_MARGIN;
11533 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
11537 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
11539 bool bufferImageGranularityConflict =
false;
11540 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
11542 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
11543 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11545 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11547 bufferImageGranularityConflict =
true;
11555 if(bufferImageGranularityConflict)
11557 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
11561 pAllocationRequest->itemsToMakeLostCount = 0;
11562 pAllocationRequest->sumItemSize = 0;
11563 size_t index1st = m_1stNullItemsBeginCount;
// Optionally sweep over 1st-vector items that overlap the requested range and
// count those that can be made lost (frame-based eviction).
11565 if(canMakeOtherLost)
11567 while(index1st < suballocations1st.size() &&
11568 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
11571 const VmaSuballocation& suballoc = suballocations1st[index1st];
11572 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
11578 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11579 if(suballoc.hAllocation->CanBecomeLost() &&
11580 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11582 ++pAllocationRequest->itemsToMakeLostCount;
11583 pAllocationRequest->sumItemSize += suballoc.size;
// Also make lost any following items that share a granularity page and conflict.
11595 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
11597 while(index1st < suballocations1st.size())
11599 const VmaSuballocation& suballoc = suballocations1st[index1st];
11600 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
11602 if(suballoc.hAllocation != VK_NULL_HANDLE)
11605 if(suballoc.hAllocation->CanBecomeLost() &&
11606 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11608 ++pAllocationRequest->itemsToMakeLostCount;
11609 pAllocationRequest->sumItemSize += suballoc.size;
// Special unsupported case: request would extend past the end of the block.
11627 if(index1st == suballocations1st.size() &&
11628 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
11631 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// Success on path 2: the request fits before the next surviving 1st item.
11636 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
11637 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
11641 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
11643 for(
size_t nextSuballocIndex = index1st;
11644 nextSuballocIndex < suballocations1st.size();
11645 nextSuballocIndex++)
11647 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
11648 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11650 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
11664 pAllocationRequest->offset = resultOffset;
11665 pAllocationRequest->sumFreeSize =
11666 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
11668 - pAllocationRequest->sumItemSize;
11669 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Makes lost the allocations counted in pAllocationRequest->itemsToMakeLostCount,
// walking the 1st vector and (in ring-buffer mode) continuing into the 2nd.
// Each lost item becomes a FREE suballocation and the free-size/null-item
// counters are updated; CleanupAfterFree() compacts afterwards.
// NOTE(review): lossy extraction — early-return and closing-brace lines elided.
11678 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
11679 uint32_t currentFrameIndex,
11680 uint32_t frameInUseCount,
11681 VmaAllocationRequest* pAllocationRequest)
// Nothing to do if the request needs no evictions.
11683 if(pAllocationRequest->itemsToMakeLostCount == 0)
11688 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
11691 SuballocationVectorType* suballocations = &AccessSuballocations1st();
11692 size_t index = m_1stNullItemsBeginCount;
11693 size_t madeLostCount = 0;
11694 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// When the 1st vector is exhausted, switch to the 2nd (ring-buffer mode only).
11696 if(index == suballocations->size())
11700 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11702 suballocations = &AccessSuballocations2nd();
11706 VMA_ASSERT(!suballocations->empty());
11708 VmaSuballocation& suballoc = (*suballocations)[index];
11709 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11711 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11712 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
11713 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Turn the slot into a free (null) item and account for it.
11715 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11716 suballoc.hAllocation = VK_NULL_HANDLE;
11717 m_SumFreeSize += suballoc.size;
11718 if(suballocations == &AccessSuballocations1st())
11720 ++m_1stNullItemsMiddleCount;
11724 ++m_2ndNullItemsCount;
11736 CleanupAfterFree();
// Makes lost every allocation in both vectors that can become lost given the
// current frame index and frame-in-use count. Returns how many were lost;
// compacts via CleanupAfterFree() if any were.
11742 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
11744 uint32_t lostAllocationCount = 0;
// Sweep the 1st vector, skipping the leading run of already-null items.
11746 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11747 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11749 VmaSuballocation& suballoc = suballocations1st[i];
11750 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11751 suballoc.hAllocation->CanBecomeLost() &&
11752 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11754 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11755 suballoc.hAllocation = VK_NULL_HANDLE;
11756 ++m_1stNullItemsMiddleCount;
11757 m_SumFreeSize += suballoc.size;
11758 ++lostAllocationCount;
// Sweep the 2nd vector the same way.
11762 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11763 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11765 VmaSuballocation& suballoc = suballocations2nd[i];
11766 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11767 suballoc.hAllocation->CanBecomeLost() &&
11768 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11770 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11771 suballoc.hAllocation = VK_NULL_HANDLE;
11772 ++m_2ndNullItemsCount;
11773 m_SumFreeSize += suballoc.size;
11774 ++lostAllocationCount;
11778 if(lostAllocationCount)
11780 CleanupAfterFree();
11783 return lostAllocationCount;
// Validates the magic values written VMA_DEBUG_MARGIN bytes before and
// immediately after every live allocation in both vectors. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin found.
11786 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
11788 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11789 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11791 const VmaSuballocation& suballoc = suballocations1st[i];
11792 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11794 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11796 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11797 return VK_ERROR_VALIDATION_FAILED_EXT;
11799 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11801 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11802 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same check for the 2nd vector.
11807 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11808 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11810 const VmaSuballocation& suballoc = suballocations2nd[i];
11811 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11813 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11815 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11816 return VK_ERROR_VALIDATION_FAILED_EXT;
11818 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11820 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11821 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously created allocation request: appends the new
// suballocation to the 1st or 2nd vector depending on request.type, switching
// the 2nd-vector mode (double stack vs. ring buffer) as needed, and reduces
// m_SumFreeSize.
11829 void VmaBlockMetadata_Linear::Alloc(
11830 const VmaAllocationRequest& request,
11831 VmaSuballocationType type,
11832 VkDeviceSize allocSize,
11835 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
11837 switch(request.type)
// Upper-address allocation: push onto the 2nd vector used as a double stack.
11839 case VmaAllocationRequestType::UpperAddress:
11841 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
11842 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
11843 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11844 suballocations2nd.push_back(newSuballoc);
11845 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Append at the end of the 1st vector.
11848 case VmaAllocationRequestType::EndOf1st:
11850 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11852 VMA_ASSERT(suballocations1st.empty() ||
11853 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
11855 VMA_ASSERT(request.offset + allocSize <= GetSize());
11857 suballocations1st.push_back(newSuballoc);
// Append at the end of the 2nd vector (ring-buffer wrap-around).
11860 case VmaAllocationRequestType::EndOf2nd:
11862 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// New allocation must end before the first surviving item of the 1st vector.
11864 VMA_ASSERT(!suballocations1st.empty() &&
11865 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
11866 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11868 switch(m_2ndVectorMode)
11870 case SECOND_VECTOR_EMPTY:
// First wrap-around: the 2nd vector now becomes a ring buffer.
11872 VMA_ASSERT(suballocations2nd.empty());
11873 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
11875 case SECOND_VECTOR_RING_BUFFER:
11877 VMA_ASSERT(!suballocations2nd.empty());
11879 case SECOND_VECTOR_DOUBLE_STACK:
11880 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
11886 suballocations2nd.push_back(newSuballoc);
11890 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
11893 m_SumFreeSize -= newSuballoc.size;
// Frees the given allocation by delegating to FreeAtOffset with its offset.
11896 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
11898 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation that starts at the given offset. Fast paths handle
// the first item of the 1st vector and the last item of either vector
// (pop_back); otherwise a binary search locates the item in the middle and it
// is marked as a null item. Asserts if the offset is not found.
// NOTE(review): lossy extraction — `return` lines after each fast path elided.
11901 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
11903 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11904 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11906 if(!suballocations1st.empty())
// Fast path: freeing the very first (oldest) live item of the 1st vector.
11909 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
11910 if(firstSuballoc.offset == offset)
11912 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11913 firstSuballoc.hAllocation = VK_NULL_HANDLE;
11914 m_SumFreeSize += firstSuballoc.size;
11915 ++m_1stNullItemsBeginCount;
11916 CleanupAfterFree();
// Fast path: freeing the top of the 2nd vector (ring buffer or double stack).
11922 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
11923 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11925 VmaSuballocation& lastSuballoc = suballocations2nd.back();
11926 if(lastSuballoc.offset == offset)
11928 m_SumFreeSize += lastSuballoc.size;
11929 suballocations2nd.pop_back();
11930 CleanupAfterFree();
// Fast path: freeing the last item of the 1st vector when 2nd is empty.
11935 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
11937 VmaSuballocation& lastSuballoc = suballocations1st.back();
11938 if(lastSuballoc.offset == offset)
11940 m_SumFreeSize += lastSuballoc.size;
11941 suballocations1st.pop_back();
11942 CleanupAfterFree();
// Slow path: binary search inside the 1st vector (sorted by offset).
11949 VmaSuballocation refSuballoc;
11950 refSuballoc.offset = offset;
11952 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
11953 suballocations1st.begin() + m_1stNullItemsBeginCount,
11954 suballocations1st.end(),
11956 VmaSuballocationOffsetLess());
11957 if(it != suballocations1st.end())
11959 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11960 it->hAllocation = VK_NULL_HANDLE;
11961 ++m_1stNullItemsMiddleCount;
11962 m_SumFreeSize += it->size;
11963 CleanupAfterFree();
// Slow path: binary search inside the 2nd vector. Sort order depends on mode:
// ascending offsets for ring buffer, descending for double stack.
11968 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
11971 VmaSuballocation refSuballoc;
11972 refSuballoc.offset = offset;
11974 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
11975 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
11976 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
11977 if(it != suballocations2nd.end())
11979 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11980 it->hAllocation = VK_NULL_HANDLE;
11981 ++m_2ndNullItemsCount;
11982 m_SumFreeSize += it->size;
11983 CleanupAfterFree();
11988 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Heuristic: compact the 1st vector when it has more than 32 items and null
// (freed) items make up a large share relative to live ones
// (nullItemCount * 2 >= liveItemCount * 3).
11991 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const
11993 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11994 const size_t suballocCount = AccessSuballocations1st().size();
11995 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping after any free/lost operation: trims null items from the ends
// of both vectors, optionally compacts the 1st vector, and when the 1st vector
// becomes fully null, swaps the roles of the two vectors (m_1stVectorIndex ^= 1)
// so the ring buffer keeps rotating.
// NOTE(review): lossy extraction — the condition guarding the initial
// clear-everything branch (block fully empty) is elided before line 12005.
11998 void VmaBlockMetadata_Linear::CleanupAfterFree()
12000 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
12001 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Fully-empty case: reset all vectors, counters and the 2nd-vector mode.
12005 suballocations1st.clear();
12006 suballocations2nd.clear();
12007 m_1stNullItemsBeginCount = 0;
12008 m_1stNullItemsMiddleCount = 0;
12009 m_2ndNullItemsCount = 0;
12010 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
12014 const size_t suballoc1stCount = suballocations1st.size();
12015 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
12016 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the leading null-run of the 1st vector over adjacent null middle items.
12019 while(m_1stNullItemsBeginCount < suballoc1stCount &&
12020 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
12022 ++m_1stNullItemsBeginCount;
12023 --m_1stNullItemsMiddleCount;
// Pop trailing null items from the 1st vector.
12027 while(m_1stNullItemsMiddleCount > 0 &&
12028 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
12030 --m_1stNullItemsMiddleCount;
12031 suballocations1st.pop_back();
// Pop trailing null items from the 2nd vector.
12035 while(m_2ndNullItemsCount > 0 &&
12036 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
12038 --m_2ndNullItemsCount;
12039 suballocations2nd.pop_back();
// Remove leading null items from the 2nd vector.
12043 while(m_2ndNullItemsCount > 0 &&
12044 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
12046 --m_2ndNullItemsCount;
12047 VmaVectorRemove(suballocations2nd, 0);
// Compact the 1st vector in place when the null-item ratio is high.
12050 if(ShouldCompact1st())
12052 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
12053 size_t srcIndex = m_1stNullItemsBeginCount;
12054 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
12056 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
12060 if(dstIndex != srcIndex)
12062 suballocations1st[dstIndex] = suballocations1st[srcIndex];
12066 suballocations1st.resize(nonNullItemCount);
12067 m_1stNullItemsBeginCount = 0;
12068 m_1stNullItemsMiddleCount = 0;
// 2nd vector emptied: leave ring-buffer/double-stack mode.
12072 if(suballocations2nd.empty())
12074 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector fully null: clear it, then promote the 2nd vector to be the new
// 1st by flipping m_1stVectorIndex (ring-buffer rotation).
12078 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
12080 suballocations1st.clear();
12081 m_1stNullItemsBeginCount = 0;
12083 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
12086 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
12087 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
12088 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
12089 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
12091 ++m_1stNullItemsBeginCount;
12092 --m_1stNullItemsMiddleCount;
12094 m_2ndNullItemsCount = 0;
12095 m_1stVectorIndex ^= 1;
12100 VMA_HEAVY_ASSERT(Validate());
// Constructor: initializes base metadata and zeroes the per-level free lists.
// NOTE(review): lossy extraction — some member initializers (e.g. m_Root,
// m_LevelCount) appear elided from the init list; verify against upstream.
12107 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
12108 VmaBlockMetadata(hAllocator),
12110 m_AllocationCount(0),
12114 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively frees the whole buddy tree starting at the root.
12117 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
12119 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of the given size. Usable size
// is rounded down to a power of two; the level count is derived from
// MIN_NODE_SIZE; a single free root node covering the whole usable range is
// created and put on level 0's free list.
12122 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
12124 VmaBlockMetadata::Init(size);
// Buddy allocation only works on power-of-two sizes; the rest is "unusable".
12126 m_UsableSize = VmaPrevPow2(size);
12127 m_SumFreeSize = m_UsableSize;
12131 while(m_LevelCount < MAX_LEVELS &&
12132 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
12137 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
12138 rootNode->offset = 0;
12139 rootNode->type = Node::TYPE_FREE;
12140 rootNode->parent = VMA_NULL;
12141 rootNode->buddy = VMA_NULL;
12144 AddToFreeListFront(0, rootNode);
// Debug validation: recursively checks the buddy tree, then verifies that the
// per-level free lists are consistent doubly-linked lists of FREE nodes and
// that levels beyond m_LevelCount are empty.
12147 bool VmaBlockMetadata_Buddy::Validate()
const
12150 ValidationContext ctx;
12151 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
12153 VMA_VALIDATE(
false &&
"ValidateNode failed.");
12155 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
12156 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Walk each level's free list and check link invariants.
12159 for(uint32_t level = 0; level < m_LevelCount; ++level)
12161 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
12162 m_FreeList[level].front->free.prev == VMA_NULL);
12164 for(Node* node = m_FreeList[level].front;
12166 node = node->free.next)
12168 VMA_VALIDATE(node->type == Node::TYPE_FREE);
12170 if(node->free.next == VMA_NULL)
12172 VMA_VALIDATE(m_FreeList[level].back == node);
12176 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels past m_LevelCount must hold no nodes.
12182 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
12184 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Returns the size of the largest free node: the node size of the shallowest
// level whose free list is non-empty (levels are scanned largest-first).
12190 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const
12192 for(uint32_t level = 0; level < m_LevelCount; ++level)
12194 if(m_FreeList[level].front != VMA_NULL)
12196 return LevelToNodeSize(level);
// Fills outInfo with allocation statistics by walking the buddy tree; the
// tail of the block beyond the power-of-two usable size is accounted as an
// unused range. NOTE(review): the outInfo initialization lines are elided in
// this extraction.
12202 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
12204 const VkDeviceSize unusableSize = GetUnusableSize();
12215 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
12217 if(unusableSize > 0)
// Accumulates this block's totals into pool-wide statistics. The unusable
// tail (block size minus power-of-two usable size) counts as unused space.
12226 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const
12228 const VkDeviceSize unusableSize = GetUnusableSize();
12230 inoutStats.
size += GetSize();
12231 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
12236 if(unusableSize > 0)
12243 #if VMA_STATS_STRING_ENABLED
// Writes a detailed JSON map of this block: summary statistics, then a
// recursive dump of the buddy tree, then the unusable tail as an unused range.
12245 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const
12249 CalcAllocationStatInfo(stat);
12251 PrintDetailedMap_Begin(
12257 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
12259 const VkDeviceSize unusableSize = GetUnusableSize();
12260 if(unusableSize > 0)
12262 PrintDetailedMap_UnusedRange(json,
12267 PrintDetailedMap_End(json);
// Finds a free buddy node for the request. For allocation types whose exact
// usage is unknown, both size and alignment are bumped to
// bufferImageGranularity to sidestep granularity conflicts. Searches from the
// target level upward (larger nodes) for a suitably aligned free node; the
// found level is stashed in customData for Alloc() to split down from.
12272 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
12273 uint32_t currentFrameIndex,
12274 uint32_t frameInUseCount,
12275 VkDeviceSize bufferImageGranularity,
12276 VkDeviceSize allocSize,
12277 VkDeviceSize allocAlignment,
12279 VmaSuballocationType allocType,
12280 bool canMakeOtherLost,
12282 VmaAllocationRequest* pAllocationRequest)
12284 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservatively align unknown/ambiguous types to the full granularity.
12288 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
12289 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
12290 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
12292 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
12293 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
12296 if(allocSize > m_UsableSize)
// Scan levels from targetLevel up to level 0 (note: loop counts down).
12301 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
12302 for(uint32_t level = targetLevel + 1; level--; )
12304 for(Node* freeNode = m_FreeList[level].front;
12305 freeNode != VMA_NULL;
12306 freeNode = freeNode->free.next)
12308 if(freeNode->offset % allocAlignment == 0)
12310 pAllocationRequest->type = VmaAllocationRequestType::Normal;
12311 pAllocationRequest->offset = freeNode->offset;
12312 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
12313 pAllocationRequest->sumItemSize = 0;
12314 pAllocationRequest->itemsToMakeLostCount = 0;
// The level of the found node, consumed by Alloc().
12315 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm does not support lost allocations; succeeds only when
// the request required no items to be made lost.
12324 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
12325 uint32_t currentFrameIndex,
12326 uint32_t frameInUseCount,
12327 VmaAllocationRequest* pAllocationRequest)
12333 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost allocations are unsupported by the buddy algorithm; body (elided in
// this extraction) is expected to report zero made lost — TODO confirm
// against upstream vk_mem_alloc.h.
12336 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation: finds the free node recorded by
// CreateAllocationRequest (level in request.customData, matched by offset),
// repeatedly splits it into buddy pairs until the target level is reached,
// then marks the final node as an allocation and updates counters.
12345 void VmaBlockMetadata_Buddy::Alloc(
12346 const VmaAllocationRequest& request,
12347 VmaSuballocationType type,
12348 VkDeviceSize allocSize,
12351 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
12353 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
12354 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Locate the free node with the requested offset on the recorded level.
12356 Node* currNode = m_FreeList[currLevel].front;
12357 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
12358 while(currNode->offset != request.offset)
12360 currNode = currNode->free.next;
12361 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down: each iteration replaces currNode with two half-size children.
12365 while(currLevel < targetLevel)
12369 RemoveFromFreeList(currLevel, currNode);
12371 const uint32_t childrenLevel = currLevel + 1;
12374 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
12375 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
12377 leftChild->offset = currNode->offset;
12378 leftChild->type = Node::TYPE_FREE;
12379 leftChild->parent = currNode;
12380 leftChild->buddy = rightChild;
12382 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
12383 rightChild->type = Node::TYPE_FREE;
12384 rightChild->parent = currNode;
12385 rightChild->buddy = leftChild;
12388 currNode->type = Node::TYPE_SPLIT;
12389 currNode->split.leftChild = leftChild;
// Left child is pushed last so it sits at the front and is picked next.
12392 AddToFreeListFront(childrenLevel, rightChild);
12393 AddToFreeListFront(childrenLevel, leftChild);
12398 currNode = m_FreeList[currLevel].front;
12407 VMA_ASSERT(currLevel == targetLevel &&
12408 currNode != VMA_NULL &&
12409 currNode->type == Node::TYPE_FREE);
12410 RemoveFromFreeList(currLevel, currNode);
12413 currNode->type = Node::TYPE_ALLOCATION;
12414 currNode->allocation.alloc = hAllocation;
12416 ++m_AllocationCount;
12418 m_SumFreeSize -= allocSize;
// Recursively deletes a subtree: for a split node, delete the right child
// (reached as leftChild->buddy) and the left child first, then the node itself.
12421 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
12423 if(node->type == Node::TYPE_SPLIT)
12425 DeleteNode(node->split.leftChild->buddy);
12426 DeleteNode(node->split.leftChild);
12429 vma_delete(GetAllocationCallbacks(), node);
// Recursively validates one buddy node against its parent/buddy links and
// accumulates allocation counts and free sizes into ctx; recurses into
// children of split nodes. NOTE(review): the switch(curr->type) line itself
// is elided in this extraction.
12432 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const
12434 VMA_VALIDATE(level < m_LevelCount);
12435 VMA_VALIDATE(curr->parent == parent);
// Only the root (no parent) has no buddy; buddy links must be mutual.
12436 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
12437 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
12440 case Node::TYPE_FREE:
12442 ctx.calculatedSumFreeSize += levelNodeSize;
12443 ++ctx.calculatedFreeCount;
12445 case Node::TYPE_ALLOCATION:
12446 ++ctx.calculatedAllocationCount;
// Internal fragmentation of this node counts as free space.
12447 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
12448 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
12450 case Node::TYPE_SPLIT:
12452 const uint32_t childrenLevel = level + 1;
12453 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
12454 const Node*
const leftChild = curr->split.leftChild;
12455 VMA_VALIDATE(leftChild != VMA_NULL);
12456 VMA_VALIDATE(leftChild->offset == curr->offset);
12457 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
12459 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
12461 const Node*
const rightChild = leftChild->buddy;
12462 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
12463 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
12465 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest (smallest-node) level whose node
// size still fits it, by halving from the usable size; clamped to
// m_LevelCount - 1. NOTE(review): the `++level` and `return level` lines are
// elided in this extraction.
12476 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const
12479 uint32_t level = 0;
12480 VkDeviceSize currLevelNodeSize = m_UsableSize;
12481 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
12482 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
12485 currLevelNodeSize = nextLevelNodeSize;
12486 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at the given offset: descends the tree from the root
// choosing left/right by offset, marks the found allocation node free, then
// merges it with its buddy up the tree while both halves are free, putting
// the final node back on its level's free list.
// NOTE(review): `--level` updates inside the loops are elided in this extraction.
12491 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
// Descend from the root to the allocation node covering `offset`.
12494 Node* node = m_Root;
12495 VkDeviceSize nodeOffset = 0;
12496 uint32_t level = 0;
12497 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
12498 while(node->type == Node::TYPE_SPLIT)
12500 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
12501 if(offset < nodeOffset + nextLevelSize)
12503 node = node->split.leftChild;
12507 node = node->split.leftChild->buddy;
12508 nodeOffset += nextLevelSize;
12511 levelNodeSize = nextLevelSize;
12514 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
12515 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
12518 --m_AllocationCount;
12519 m_SumFreeSize += alloc->GetSize();
12521 node->type = Node::TYPE_FREE;
// Merge free buddy pairs bottom-up: delete both children, free the parent.
12524 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
12526 RemoveFromFreeList(level, node->buddy);
12527 Node*
const parent = node->parent;
12529 vma_delete(GetAllocationCallbacks(), node->buddy);
12530 vma_delete(GetAllocationCallbacks(), node);
12531 parent->type = Node::TYPE_FREE;
12539 AddToFreeListFront(level, node);
// Recursive statistics helper: free nodes count as unused ranges, allocation
// nodes contribute their size (plus internal fragmentation as unused), split
// nodes recurse into both children. NOTE(review): the switch(node->type) line
// and several stat-accumulation lines are elided in this extraction.
12542 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const
12546 case Node::TYPE_FREE:
12552 case Node::TYPE_ALLOCATION:
12554 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Internal fragmentation of the node is reported as an unused range.
12560 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
12561 if(unusedRangeSize > 0)
12570 case Node::TYPE_SPLIT:
12572 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
12573 const Node*
const leftChild = node->split.leftChild;
12574 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
12575 const Node*
const rightChild = leftChild->buddy;
12576 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
// Pushes a FREE node onto the front of the doubly-linked free list for the
// given level, handling the empty-list case.
12584 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
12586 VMA_ASSERT(node->type == Node::TYPE_FREE);
12589 Node*
const frontNode = m_FreeList[level].front;
12590 if(frontNode == VMA_NULL)
// Empty list: node becomes both front and back.
12592 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
12593 node->free.prev = node->free.next = VMA_NULL;
12594 m_FreeList[level].front = m_FreeList[level].back = node;
12598 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
12599 node->free.prev = VMA_NULL;
12600 node->free.next = frontNode;
12601 frontNode->free.prev = node;
12602 m_FreeList[level].front = node;
// Unlinks a node from the doubly-linked free list of the given level,
// patching front/back pointers when the node is at either end.
12606 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
12608 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Fix the predecessor link (or the list front).
12611 if(node->free.prev == VMA_NULL)
12613 VMA_ASSERT(m_FreeList[level].front == node)
12614 m_FreeList[level].front = node->free.next;
12618 Node*
const prevFreeNode = node->free.prev;
12619 VMA_ASSERT(prevFreeNode->free.next == node);
12620 prevFreeNode->free.next = node->free.next;
// Fix the successor link (or the list back).
12624 if(node->free.next == VMA_NULL)
12626 VMA_ASSERT(m_FreeList[level].back == node);
12627 m_FreeList[level].back = node->free.prev;
12631 Node*
const nextFreeNode = node->free.next;
12632 VMA_ASSERT(nextFreeNode->free.prev == node);
12633 nextFreeNode->free.prev = node->free.prev;
12637 #if VMA_STATS_STRING_ENABLED
// Recursive JSON dump of one buddy node: free nodes print as unused ranges,
// allocation nodes print the allocation (plus trailing internal fragmentation),
// split nodes recurse into both children. NOTE(review): the switch(node->type)
// line is elided in this extraction.
12638 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const
12642 case Node::TYPE_FREE:
12643 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
12645 case Node::TYPE_ALLOCATION:
12647 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
12648 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
12649 if(allocSize < levelNodeSize)
12651 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
12655 case Node::TYPE_SPLIT:
12657 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
12658 const Node*
const leftChild = node->split.leftChild;
12659 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
12660 const Node*
const rightChild = leftChild->buddy;
12661 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
12674 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
12675 m_pMetadata(VMA_NULL),
12676 m_MemoryTypeIndex(UINT32_MAX),
12678 m_hMemory(VK_NULL_HANDLE),
12680 m_pMappedData(VMA_NULL)
12684 void VmaDeviceMemoryBlock::Init(
12687 uint32_t newMemoryTypeIndex,
12688 VkDeviceMemory newMemory,
12689 VkDeviceSize newSize,
12691 uint32_t algorithm)
12693 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
12695 m_hParentPool = hParentPool;
12696 m_MemoryTypeIndex = newMemoryTypeIndex;
12698 m_hMemory = newMemory;
12703 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
12706 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
12712 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
12714 m_pMetadata->Init(newSize);
12717 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
12721 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
12723 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
12724 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
12725 m_hMemory = VK_NULL_HANDLE;
12727 vma_delete(allocator, m_pMetadata);
12728 m_pMetadata = VMA_NULL;
12731 bool VmaDeviceMemoryBlock::Validate()
const
12733 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
12734 (m_pMetadata->GetSize() != 0));
12736 return m_pMetadata->Validate();
12739 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
12741 void* pData =
nullptr;
12742 VkResult res = Map(hAllocator, 1, &pData);
12743 if(res != VK_SUCCESS)
12748 res = m_pMetadata->CheckCorruption(pData);
12750 Unmap(hAllocator, 1);
12755 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
12762 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12763 if(m_MapCount != 0)
12765 m_MapCount += count;
12766 VMA_ASSERT(m_pMappedData != VMA_NULL);
12767 if(ppData != VMA_NULL)
12769 *ppData = m_pMappedData;
12775 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
12776 hAllocator->m_hDevice,
12782 if(result == VK_SUCCESS)
12784 if(ppData != VMA_NULL)
12786 *ppData = m_pMappedData;
12788 m_MapCount = count;
12794 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
12801 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12802 if(m_MapCount >= count)
12804 m_MapCount -= count;
12805 if(m_MapCount == 0)
12807 m_pMappedData = VMA_NULL;
12808 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
12813 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
12817 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12819 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12820 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12823 VkResult res = Map(hAllocator, 1, &pData);
12824 if(res != VK_SUCCESS)
12829 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
12830 VmaWriteMagicValue(pData, allocOffset + allocSize);
12832 Unmap(hAllocator, 1);
12837 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12839 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12840 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12843 VkResult res = Map(hAllocator, 1, &pData);
12844 if(res != VK_SUCCESS)
12849 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
12851 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
12853 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
12855 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
12858 Unmap(hAllocator, 1);
12863 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
12866 VkDeviceSize allocationLocalOffset,
12870 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12871 hAllocation->GetBlock() ==
this);
12872 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12873 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12874 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12876 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12877 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
12880 VkResult VmaDeviceMemoryBlock::BindImageMemory(
12883 VkDeviceSize allocationLocalOffset,
12887 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12888 hAllocation->GetBlock() ==
this);
12889 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12890 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12891 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12893 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12894 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
12899 memset(&outInfo, 0,
sizeof(outInfo));
12918 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
12926 VmaPool_T::VmaPool_T(
12929 VkDeviceSize preferredBlockSize) :
12933 createInfo.memoryTypeIndex,
12934 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
12935 createInfo.minBlockCount,
12936 createInfo.maxBlockCount,
12938 createInfo.frameInUseCount,
12939 createInfo.blockSize != 0,
12941 createInfo.priority,
12942 VMA_MAX(hAllocator->GetMemoryTypeMinAlignment(createInfo.memoryTypeIndex), createInfo.minAllocationAlignment),
12943 createInfo.pMemoryAllocateNext),
12949 VmaPool_T::~VmaPool_T()
12951 VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL);
12954 void VmaPool_T::SetName(
const char* pName)
12956 const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
12957 VmaFreeString(allocs, m_Name);
12959 if(pName != VMA_NULL)
12961 m_Name = VmaCreateStringCopy(allocs, pName);
12969 #if VMA_STATS_STRING_ENABLED
12973 VmaBlockVector::VmaBlockVector(
12976 uint32_t memoryTypeIndex,
12977 VkDeviceSize preferredBlockSize,
12978 size_t minBlockCount,
12979 size_t maxBlockCount,
12980 VkDeviceSize bufferImageGranularity,
12981 uint32_t frameInUseCount,
12982 bool explicitBlockSize,
12983 uint32_t algorithm,
12985 VkDeviceSize minAllocationAlignment,
12986 void* pMemoryAllocateNext) :
12987 m_hAllocator(hAllocator),
12988 m_hParentPool(hParentPool),
12989 m_MemoryTypeIndex(memoryTypeIndex),
12990 m_PreferredBlockSize(preferredBlockSize),
12991 m_MinBlockCount(minBlockCount),
12992 m_MaxBlockCount(maxBlockCount),
12993 m_BufferImageGranularity(bufferImageGranularity),
12994 m_FrameInUseCount(frameInUseCount),
12995 m_ExplicitBlockSize(explicitBlockSize),
12996 m_Algorithm(algorithm),
12997 m_Priority(priority),
12998 m_MinAllocationAlignment(minAllocationAlignment),
12999 m_pMemoryAllocateNext(pMemoryAllocateNext),
13000 m_HasEmptyBlock(false),
13001 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
13006 VmaBlockVector::~VmaBlockVector()
13008 for(
size_t i = m_Blocks.size(); i--; )
13010 m_Blocks[i]->Destroy(m_hAllocator);
13011 vma_delete(m_hAllocator, m_Blocks[i]);
13015 VkResult VmaBlockVector::CreateMinBlocks()
13017 for(
size_t i = 0; i < m_MinBlockCount; ++i)
13019 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
13020 if(res != VK_SUCCESS)
13028 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
13030 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13032 const size_t blockCount = m_Blocks.size();
13041 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13043 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
13044 VMA_ASSERT(pBlock);
13045 VMA_HEAVY_ASSERT(pBlock->Validate());
13046 pBlock->m_pMetadata->AddPoolStats(*pStats);
13050 bool VmaBlockVector::IsEmpty()
13052 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13053 return m_Blocks.empty();
13056 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const
13058 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13059 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
13060 (VMA_DEBUG_MARGIN > 0) &&
13062 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
13065 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
13067 VkResult VmaBlockVector::Allocate(
13068 uint32_t currentFrameIndex,
13070 VkDeviceSize alignment,
13072 VmaSuballocationType suballocType,
13073 size_t allocationCount,
13077 VkResult res = VK_SUCCESS;
13079 alignment = VMA_MAX(alignment, m_MinAllocationAlignment);
13081 if(IsCorruptionDetectionEnabled())
13083 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
13084 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
13088 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13089 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13091 res = AllocatePage(
13097 pAllocations + allocIndex);
13098 if(res != VK_SUCCESS)
13105 if(res != VK_SUCCESS)
13108 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13109 while(allocIndex--)
13111 VmaAllocation_T*
const alloc = pAllocations[allocIndex];
13112 const VkDeviceSize allocSize = alloc->GetSize();
13114 m_hAllocator->m_Budget.RemoveAllocation(heapIndex, allocSize);
13116 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single allocation ("page") from this block vector.
// Overall strategy: (1) try existing blocks (order depends on strategy),
// (2) create a new block — stepping its size down on failure — and allocate
// from it, (3) if allowed, try to make other (lost-able) allocations lost
// to free up space. Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when all fail.
13122 VkResult VmaBlockVector::AllocatePage(
13123 uint32_t currentFrameIndex,
13125 VkDeviceSize alignment,
13127 VmaSuballocationType suballocType,
// Query how much memory the heap budget still allows us to allocate.
13135 VkDeviceSize freeMemory;
13137 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13139 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
// Custom pools cannot fall back to dedicated allocations, so they may
// allocate beyond the budget; the default pools must respect it.
13143 const bool canFallbackToDedicated = !IsCustomPool();
13144 const bool canCreateNewBlock =
13146 (m_Blocks.size() < m_MaxBlockCount) &&
13147 (freeMemory >= size || !canFallbackToDedicated);
// "Make other lost" is disabled in configurations that don't support it.
13154 canMakeOtherLost =
false;
// Upper-address allocation is a linear-algorithm-only feature.
13158 if(isUpperAddress &&
13161 return VK_ERROR_FEATURE_NOT_PRESENT;
13175 return VK_ERROR_FEATURE_NOT_PRESENT;
// Early reject: request (plus both debug margins) can never fit in a block.
13179 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
13181 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 1. / 2. Search existing blocks, then possibly create a new one.
13189 if(!canMakeOtherLost || canCreateNewBlock)
// Fast path: the last block is the most likely to have free space.
13198 if(!m_Blocks.empty())
13200 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
13201 VMA_ASSERT(pCurrBlock);
13202 VkResult res = AllocateFromBlock(
13212 if(res == VK_SUCCESS)
13214 VMA_DEBUG_LOG(
" Returned from last block #%u", pCurrBlock->GetId());
// Forward scan over all blocks (best-fit oriented strategies).
13224 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13226 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13227 VMA_ASSERT(pCurrBlock);
13228 VkResult res = AllocateFromBlock(
13238 if(res == VK_SUCCESS)
13240 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
// Backward scan over all blocks (worst-fit oriented strategies).
13248 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13250 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13251 VMA_ASSERT(pCurrBlock);
13252 VkResult res = AllocateFromBlock(
13262 if(res == VK_SUCCESS)
13264 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
// 2. Try to create a new block.
13272 if(canCreateNewBlock)
// Start from preferred block size and allow shrinking it up to
// NEW_BLOCK_SIZE_SHIFT_MAX times (halving each step).
13275 VkDeviceSize newBlockSize = m_PreferredBlockSize;
13276 uint32_t newBlockSizeShift = 0;
13277 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
13279 if(!m_ExplicitBlockSize)
// Pre-shrink: don't jump straight to full size if existing blocks are
// smaller and the request would still fit with room to spare.
13282 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
13283 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
13285 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
13286 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
13288 newBlockSize = smallerNewBlockSize;
13289 ++newBlockSizeShift;
13298 size_t newBlockIndex = 0;
// Respect the budget unless we have no dedicated fallback.
13299 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
13300 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
// On failure, retry with progressively smaller block sizes.
13302 if(!m_ExplicitBlockSize)
13304 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
13306 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
13307 if(smallerNewBlockSize >= size)
13309 newBlockSize = smallerNewBlockSize;
13310 ++newBlockSizeShift;
13311 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
13312 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block created successfully — allocate from it.
13321 if(res == VK_SUCCESS)
13323 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
13324 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
13326 res = AllocateFromBlock(
13336 if(res == VK_SUCCESS)
13338 VMA_DEBUG_LOG(
" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
// Allocation from a freshly created empty block must not fail.
13344 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Try to allocate by making other allocations lost.
13351 if(canMakeOtherLost)
13353 uint32_t tryIndex = 0;
13354 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
// Find the request with the cheapest cost (fewest bytes of allocations
// that would have to be sacrificed).
13356 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
13357 VmaAllocationRequest bestRequest = {};
13358 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward search variant.
13364 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13366 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13367 VMA_ASSERT(pCurrBlock);
13368 VmaAllocationRequest currRequest = {};
13369 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13372 m_BufferImageGranularity,
13381 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13382 if(pBestRequestBlock == VMA_NULL ||
13383 currRequestCost < bestRequestCost)
13385 pBestRequestBlock = pCurrBlock;
13386 bestRequest = currRequest;
13387 bestRequestCost = currRequestCost;
// Cost 0 means nothing has to be made lost — cannot do better.
13389 if(bestRequestCost == 0)
// Backward search variant.
13400 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13402 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13403 VMA_ASSERT(pCurrBlock);
13404 VmaAllocationRequest currRequest = {};
13405 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13408 m_BufferImageGranularity,
13417 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13418 if(pBestRequestBlock == VMA_NULL ||
13419 currRequestCost < bestRequestCost ||
13422 pBestRequestBlock = pCurrBlock;
13423 bestRequest = currRequest;
13424 bestRequestCost = currRequestCost;
13426 if(bestRequestCost == 0 ||
// Commit the best request found, if any.
13436 if(pBestRequestBlock != VMA_NULL)
13440 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
13441 if(res != VK_SUCCESS)
// Making the required allocations lost may fail if another thread won
// the race; in that case the outer loop retries.
13447 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
13453 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13454 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
13455 UpdateHasEmptyBlock();
13456 (*pAllocation)->InitBlockAllocation(
13458 bestRequest.offset,
13465 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
13466 VMA_DEBUG_LOG(
" Returned from existing block");
13467 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
13468 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13469 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13471 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
13473 if(IsCorruptionDetectionEnabled())
13475 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
13476 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Retry budget exhausted — we were constantly losing the race against
// other threads making our candidate allocations lost.
13491 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
13493 return VK_ERROR_TOO_MANY_OBJECTS;
13497 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13500 void VmaBlockVector::Free(
13503 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
13505 bool budgetExceeded =
false;
13507 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13509 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
13510 budgetExceeded = heapBudget.
usage >= heapBudget.
budget;
13515 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13517 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13519 if(IsCorruptionDetectionEnabled())
13521 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
13522 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
13525 if(hAllocation->IsPersistentMap())
13527 pBlock->Unmap(m_hAllocator, 1);
13530 pBlock->m_pMetadata->Free(hAllocation);
13531 VMA_HEAVY_ASSERT(pBlock->Validate());
13533 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
13535 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
13537 if(pBlock->m_pMetadata->IsEmpty())
13540 if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
13542 pBlockToDelete = pBlock;
13549 else if(m_HasEmptyBlock && canDeleteBlock)
13551 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
13552 if(pLastBlock->m_pMetadata->IsEmpty())
13554 pBlockToDelete = pLastBlock;
13555 m_Blocks.pop_back();
13559 UpdateHasEmptyBlock();
13560 IncrementallySortBlocks();
13565 if(pBlockToDelete != VMA_NULL)
13567 VMA_DEBUG_LOG(
" Deleted empty block");
13568 pBlockToDelete->Destroy(m_hAllocator);
13569 vma_delete(m_hAllocator, pBlockToDelete);
13573 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const
13575 VkDeviceSize result = 0;
13576 for(
size_t i = m_Blocks.size(); i--; )
13578 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
13579 if(result >= m_PreferredBlockSize)
13587 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
13589 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13591 if(m_Blocks[blockIndex] == pBlock)
13593 VmaVectorRemove(m_Blocks, blockIndex);
13600 void VmaBlockVector::IncrementallySortBlocks()
13605 for(
size_t i = 1; i < m_Blocks.size(); ++i)
13607 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
13609 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
13616 VkResult VmaBlockVector::AllocateFromBlock(
13617 VmaDeviceMemoryBlock* pBlock,
13618 uint32_t currentFrameIndex,
13620 VkDeviceSize alignment,
13623 VmaSuballocationType suballocType,
13632 VmaAllocationRequest currRequest = {};
13633 if(pBlock->m_pMetadata->CreateAllocationRequest(
13636 m_BufferImageGranularity,
13646 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
13650 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
13651 if(res != VK_SUCCESS)
13657 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13658 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
13659 UpdateHasEmptyBlock();
13660 (*pAllocation)->InitBlockAllocation(
13662 currRequest.offset,
13669 VMA_HEAVY_ASSERT(pBlock->Validate());
13670 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
13671 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13672 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13674 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
13676 if(IsCorruptionDetectionEnabled())
13678 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
13679 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
13683 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13686 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
13688 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
13689 allocInfo.pNext = m_pMemoryAllocateNext;
13690 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
13691 allocInfo.allocationSize = blockSize;
13693 #if VMA_BUFFER_DEVICE_ADDRESS
13695 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
13696 if(m_hAllocator->m_UseKhrBufferDeviceAddress)
13698 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
13699 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
13703 #if VMA_MEMORY_PRIORITY
13704 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
13705 if(m_hAllocator->m_UseExtMemoryPriority)
13707 priorityInfo.priority = m_Priority;
13708 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
13712 #if VMA_EXTERNAL_MEMORY
13714 VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR };
13715 exportMemoryAllocInfo.handleTypes = m_hAllocator->GetExternalMemoryHandleTypeFlags(m_MemoryTypeIndex);
13716 if(exportMemoryAllocInfo.handleTypes != 0)
13718 VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo);
13722 VkDeviceMemory mem = VK_NULL_HANDLE;
13723 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
13732 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
13738 allocInfo.allocationSize,
13742 m_Blocks.push_back(pBlock);
13743 if(pNewBlockIndex != VMA_NULL)
13745 *pNewBlockIndex = m_Blocks.size() - 1;
13751 void VmaBlockVector::ApplyDefragmentationMovesCpu(
13752 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13753 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
13755 const size_t blockCount = m_Blocks.size();
13756 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
13760 BLOCK_FLAG_USED = 0x00000001,
13761 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
13769 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
13770 blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
13771 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
13774 const size_t moveCount = moves.size();
13775 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13777 const VmaDefragmentationMove& move = moves[moveIndex];
13778 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
13779 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
13782 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
13785 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13787 BlockInfo& currBlockInfo = blockInfo[blockIndex];
13788 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13789 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
13791 currBlockInfo.pMappedData = pBlock->GetMappedData();
13793 if(currBlockInfo.pMappedData == VMA_NULL)
13795 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
13796 if(pDefragCtx->res == VK_SUCCESS)
13798 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
13805 if(pDefragCtx->res == VK_SUCCESS)
13807 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13808 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13810 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13812 const VmaDefragmentationMove& move = moves[moveIndex];
13814 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
13815 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
13817 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
13822 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
13823 memRange.memory = pSrcBlock->GetDeviceMemory();
13824 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
13825 memRange.size = VMA_MIN(
13826 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
13827 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
13828 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
13833 reinterpret_cast<char*
>(dstBlockInfo.pMappedData) + move.dstOffset,
13834 reinterpret_cast<char*
>(srcBlockInfo.pMappedData) + move.srcOffset,
13835 static_cast<size_t>(move.size));
13837 if(IsCorruptionDetectionEnabled())
13839 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
13840 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
13846 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
13847 memRange.memory = pDstBlock->GetDeviceMemory();
13848 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
13849 memRange.size = VMA_MIN(
13850 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
13851 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
13852 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
13859 for(
size_t blockIndex = blockCount; blockIndex--; )
13861 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
13862 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
13864 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13865 pBlock->Unmap(m_hAllocator, 1);
13870 void VmaBlockVector::ApplyDefragmentationMovesGpu(
13871 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13872 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13873 VkCommandBuffer commandBuffer)
13875 const size_t blockCount = m_Blocks.size();
13877 pDefragCtx->blockContexts.resize(blockCount);
13878 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
13881 const size_t moveCount = moves.size();
13882 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13884 const VmaDefragmentationMove& move = moves[moveIndex];
13889 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13890 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13894 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
13898 VkBufferCreateInfo bufCreateInfo;
13899 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
13901 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13903 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
13904 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13905 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
13907 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
13908 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
13909 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
13910 if(pDefragCtx->res == VK_SUCCESS)
13912 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
13913 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
13920 if(pDefragCtx->res == VK_SUCCESS)
13922 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13924 const VmaDefragmentationMove& move = moves[moveIndex];
13926 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
13927 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
13929 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
13931 VkBufferCopy region = {
13935 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
13936 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
13941 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
13943 pDefragCtx->res = VK_NOT_READY;
13949 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13951 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13952 if(pBlock->m_pMetadata->IsEmpty())
13954 if(m_Blocks.size() > m_MinBlockCount)
13956 if(pDefragmentationStats != VMA_NULL)
13959 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
13962 VmaVectorRemove(m_Blocks, blockIndex);
13963 pBlock->Destroy(m_hAllocator);
13964 vma_delete(m_hAllocator, pBlock);
13972 UpdateHasEmptyBlock();
13975 void VmaBlockVector::UpdateHasEmptyBlock()
13977 m_HasEmptyBlock =
false;
13978 for(
size_t index = 0, count = m_Blocks.size(); index < count; ++index)
13980 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[index];
13981 if(pBlock->m_pMetadata->IsEmpty())
13983 m_HasEmptyBlock =
true;
13989 #if VMA_STATS_STRING_ENABLED
13991 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
13993 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13995 json.BeginObject();
13999 const char* poolName = m_hParentPool->GetName();
14000 if(poolName != VMA_NULL && poolName[0] !=
'\0')
14002 json.WriteString(
"Name");
14003 json.WriteString(poolName);
14006 json.WriteString(
"MemoryTypeIndex");
14007 json.WriteNumber(m_MemoryTypeIndex);
14009 json.WriteString(
"BlockSize");
14010 json.WriteNumber(m_PreferredBlockSize);
14012 json.WriteString(
"BlockCount");
14013 json.BeginObject(
true);
14014 if(m_MinBlockCount > 0)
14016 json.WriteString(
"Min");
14017 json.WriteNumber((uint64_t)m_MinBlockCount);
14019 if(m_MaxBlockCount < SIZE_MAX)
14021 json.WriteString(
"Max");
14022 json.WriteNumber((uint64_t)m_MaxBlockCount);
14024 json.WriteString(
"Cur");
14025 json.WriteNumber((uint64_t)m_Blocks.size());
14028 if(m_FrameInUseCount > 0)
14030 json.WriteString(
"FrameInUseCount");
14031 json.WriteNumber(m_FrameInUseCount);
14034 if(m_Algorithm != 0)
14036 json.WriteString(
"Algorithm");
14037 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
14042 json.WriteString(
"PreferredBlockSize");
14043 json.WriteNumber(m_PreferredBlockSize);
14046 json.WriteString(
"Blocks");
14047 json.BeginObject();
14048 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14050 json.BeginString();
14051 json.ContinueString(m_Blocks[i]->GetId());
14054 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
14063 void VmaBlockVector::Defragment(
14064 class VmaBlockVectorDefragmentationContext* pCtx,
14066 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
14067 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
14068 VkCommandBuffer commandBuffer)
14070 pCtx->res = VK_SUCCESS;
14072 const VkMemoryPropertyFlags memPropFlags =
14073 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
14074 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
14076 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
14078 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
14079 !IsCorruptionDetectionEnabled() &&
14080 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
14083 if(canDefragmentOnCpu || canDefragmentOnGpu)
14085 bool defragmentOnGpu;
14087 if(canDefragmentOnGpu != canDefragmentOnCpu)
14089 defragmentOnGpu = canDefragmentOnGpu;
14094 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
14095 m_hAllocator->IsIntegratedGpu();
14098 bool overlappingMoveSupported = !defragmentOnGpu;
14100 if(m_hAllocator->m_UseMutex)
14104 if(!m_Mutex.TryLockWrite())
14106 pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
14112 m_Mutex.LockWrite();
14113 pCtx->mutexLocked =
true;
14117 pCtx->Begin(overlappingMoveSupported, flags);
14121 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
14122 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
14123 pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);
14126 if(pStats != VMA_NULL)
14128 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
14129 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
14132 VMA_ASSERT(bytesMoved <= maxBytesToMove);
14133 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
14134 if(defragmentOnGpu)
14136 maxGpuBytesToMove -= bytesMoved;
14137 maxGpuAllocationsToMove -= allocationsMoved;
14141 maxCpuBytesToMove -= bytesMoved;
14142 maxCpuAllocationsToMove -= allocationsMoved;
14148 if(m_hAllocator->m_UseMutex)
14149 m_Mutex.UnlockWrite();
14151 if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
14152 pCtx->res = VK_NOT_READY;
14157 if(pCtx->res >= VK_SUCCESS)
14159 if(defragmentOnGpu)
14161 ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
14165 ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
14171 void VmaBlockVector::DefragmentationEnd(
14172 class VmaBlockVectorDefragmentationContext* pCtx,
14178 VMA_ASSERT(pCtx->mutexLocked ==
false);
14182 m_Mutex.LockWrite();
14183 pCtx->mutexLocked =
true;
14187 if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
14190 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
14192 VmaBlockDefragmentationContext &blockCtx = pCtx->blockContexts[blockIndex];
14193 if(blockCtx.hBuffer)
14195 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
14199 if(pCtx->res >= VK_SUCCESS)
14201 FreeEmptyBlocks(pStats);
14205 if(pCtx->mutexLocked)
14207 VMA_ASSERT(m_hAllocator->m_UseMutex);
14208 m_Mutex.UnlockWrite();
14212 uint32_t VmaBlockVector::ProcessDefragmentations(
14213 class VmaBlockVectorDefragmentationContext *pCtx,
14216 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14218 const uint32_t moveCount = VMA_MIN(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);
14220 for(uint32_t i = 0; i < moveCount; ++ i)
14222 VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];
14225 pMove->
memory = move.pDstBlock->GetDeviceMemory();
14226 pMove->
offset = move.dstOffset;
14231 pCtx->defragmentationMovesProcessed += moveCount;
14236 void VmaBlockVector::CommitDefragmentations(
14237 class VmaBlockVectorDefragmentationContext *pCtx,
14240 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14242 for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
14244 const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];
14246 move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
14247 move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
14250 pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
14251 FreeEmptyBlocks(pStats);
14254 size_t VmaBlockVector::CalcAllocationCount()
const
14257 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14259 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Returns whether a buffer/image granularity conflict could occur in any block.
// Trivially impossible when m_BufferImageGranularity == 1; otherwise queries
// each block's generic metadata, threading lastSuballocType across blocks.
// Only valid for the default algorithm (asserted below).
14264 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const
14266 if(m_BufferImageGranularity == 1)
14270 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
14271 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
14273 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
14274 VMA_ASSERT(m_Algorithm == 0);
14275 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
14276 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks stale allocations in every block as lost for the given frame index,
// honoring m_FrameInUseCount. Optionally reports the total number of
// allocations made lost via pLostAllocationCount.
14284 void VmaBlockVector::MakePoolAllocationsLost(
14285 uint32_t currentFrameIndex,
14286 size_t* pLostAllocationCount)
14288 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14289 size_t lostAllocationCount = 0;
14290 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14292 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14293 VMA_ASSERT(pBlock);
14294 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
14296 if(pLostAllocationCount != VMA_NULL)
14298 *pLostAllocationCount = lostAllocationCount;
// Runs the per-block corruption check (margin validation) over all blocks.
// Returns VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is disabled;
// otherwise propagates the first non-success result from a block.
14302 VkResult VmaBlockVector::CheckCorruption()
14304 if(!IsCorruptionDetectionEnabled())
14306 return VK_ERROR_FEATURE_NOT_PRESENT;
14309 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
14310 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14312 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14313 VMA_ASSERT(pBlock);
14314 VkResult res = pBlock->CheckCorruption(m_hAllocator);
14315 if(res != VK_SUCCESS)
// Accumulates this vector's per-block allocation statistics into pStats,
// folding each block's stat info into the totals, the owning memory type's
// entry, and the owning heap's entry.
14323 void VmaBlockVector::AddStats(
VmaStats* pStats)
14325 const uint32_t memTypeIndex = m_MemoryTypeIndex;
14326 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
14328 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
14330 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14332 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14333 VMA_ASSERT(pBlock);
14334 VMA_HEAVY_ASSERT(pBlock->Validate());
14336 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
14337 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14338 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14339 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: snapshots BlockInfo entries for every block in the vector
// (preserving each block's original index) and sorts them by block pointer so
// AddAllocation() can binary-search for the owning block.
14346 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
14348 VmaBlockVector* pBlockVector,
14349 uint32_t currentFrameIndex,
14350 bool overlappingMoveSupported) :
14351 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14352 m_AllocationCount(0),
14353 m_AllAllocations(false),
14355 m_AllocationsMoved(0),
14356 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
14359 const size_t blockCount = m_pBlockVector->m_Blocks.size();
14360 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14362 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
14363 pBlockInfo->m_OriginalBlockIndex = blockIndex;
14364 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
14365 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer for binary search in AddAllocation().
14369 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Destructor: releases the heap-allocated BlockInfo entries created in the ctor.
14372 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
14374 for(
size_t i = m_Blocks.size(); i--; )
14376 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a defragmentation candidate. Lost allocations
// are skipped; otherwise the owning block is located by binary search over the
// pointer-sorted m_Blocks and the allocation is appended to its list.
14380 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
14383 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
14385 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
14386 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
14387 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
14389 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
14390 (*it)->m_Allocations.push_back(allocInfo);
14397 ++m_AllocationCount;
// One round of generic defragmentation: walks allocations from the last block
// backwards and tries to re-place each into an earlier block (or earlier
// offset) via CreateAllocationRequest + MoveMakesSense, recording each
// successful relocation in `moves` and honoring maxBytesToMove /
// maxAllocationsToMove budgets. When freeOldAllocations is set the move is
// also applied immediately to the metadata.
14401 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
14402 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14403 VkDeviceSize maxBytesToMove,
14404 uint32_t maxAllocationsToMove,
14405 bool freeOldAllocations)
14407 if(m_Blocks.empty())
14420 size_t srcBlockMinIndex = 0;
// Start from the last allocation of the last block; SIZE_MAX triggers
// initialization inside the loop below.
14433 size_t srcBlockIndex = m_Blocks.size() - 1;
14434 size_t srcAllocIndex = SIZE_MAX;
14440 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
14442 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
14445 if(srcBlockIndex == srcBlockMinIndex)
14452 srcAllocIndex = SIZE_MAX;
14457 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
14461 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
14462 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
14464 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
14465 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
14466 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
14467 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source block.
14470 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
14472 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
14473 VmaAllocationRequest dstAllocRequest;
14474 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
14475 m_CurrentFrameIndex,
14476 m_pBlockVector->GetFrameInUseCount(),
14477 m_pBlockVector->GetBufferImageGranularity(),
14484 &dstAllocRequest) &&
14486 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
14488 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop when the move would exceed either budget.
14491 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
14492 (m_BytesMoved + size > maxBytesToMove))
14497 VmaDefragmentationMove move = {};
14498 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
14499 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
14500 move.srcOffset = srcOffset;
14501 move.dstOffset = dstAllocRequest.offset;
14503 move.hAllocation = allocInfo.m_hAllocation;
14504 move.pSrcBlock = pSrcBlockInfo->m_pBlock;
14505 move.pDstBlock = pDstBlockInfo->m_pBlock;
14507 moves.push_back(move);
14509 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
14513 allocInfo.m_hAllocation);
14515 if(freeOldAllocations)
14517 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
14518 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
14521 if(allocInfo.m_pChanged != VMA_NULL)
14523 *allocInfo.m_pChanged = VK_TRUE;
14526 ++m_AllocationsMoved;
14527 m_BytesMoved += size;
14529 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous allocation / previous block.
14537 if(srcAllocIndex > 0)
14543 if(srcBlockIndex > 0)
14546 srcAllocIndex = SIZE_MAX;
// Counts blocks flagged as containing at least one non-movable allocation.
14556 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const
14559 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14561 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Top-level entry for the generic algorithm: populates per-block allocation
// lists (all suballocations when m_AllAllocations, otherwise those added via
// AddAllocation), sorts blocks by move-destination preference, then runs up
// to roundCount passes of DefragmentRound.
14569 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
14570 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14571 VkDeviceSize maxBytesToMove,
14572 uint32_t maxAllocationsToMove,
14575 if(!m_AllAllocations && m_AllocationCount == 0)
14580 const size_t blockCount = m_Blocks.size();
14581 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14583 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
14585 if(m_AllAllocations)
// In "all allocations" mode, enumerate every non-free suballocation directly
// from the block's generic metadata.
14587 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
14588 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
14589 it != pMetadata->m_Suballocations.end();
14592 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
14594 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
14595 pBlockInfo->m_Allocations.push_back(allocInfo);
14600 pBlockInfo->CalcHasNonMovableAllocations();
14604 pBlockInfo->SortAllocationsByOffsetDescending();
14610 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Two passes: the second can pick up moves enabled by the first.
14613 const uint32_t roundCount = 2;
14616 VkResult result = VK_SUCCESS;
14617 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
// Heuristic: a move is worthwhile only towards an earlier block, or within the
// same block towards a lower offset.
14625 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
14626 size_t dstBlockIndex, VkDeviceSize dstOffset,
14627 size_t srcBlockIndex, VkDeviceSize srcOffset)
14629 if(dstBlockIndex < srcBlockIndex)
14633 if(dstBlockIndex > srcBlockIndex)
14637 if(dstOffset < srcOffset)
// Constructor for the fast (compacting) algorithm. Only valid when no debug
// margin is configured (asserted), since it packs suballocations tightly.
14647 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
14649 VmaBlockVector* pBlockVector,
14650 uint32_t currentFrameIndex,
14651 bool overlappingMoveSupported) :
14652 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14653 m_OverlappingMoveSupported(overlappingMoveSupported),
14654 m_AllocationCount(0),
14655 m_AllAllocations(false),
14657 m_AllocationsMoved(0),
14658 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
14660 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Trivial destructor — m_BlockInfos cleans itself up.
14664 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast compaction pass: requires that ALL allocations participate (asserted).
// Blocks are sorted by ascending free size, then every suballocation is swept
// in order and packed to the lowest available offset — either into a gap
// recorded in the FreeSpaceDatabase or at the running destination cursor
// (dstOffset), moving across blocks when the current destination fills up.
// Metadata is bulk-edited directly (Preprocess/PostprocessMetadata) instead of
// going through per-allocation Alloc/Free.
14668 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
14669 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14670 VkDeviceSize maxBytesToMove,
14671 uint32_t maxAllocationsToMove,
14674 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
14676 const size_t blockCount = m_pBlockVector->GetBlockCount();
14677 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
14682 PreprocessMetadata();
// Sort block infos so blocks with the least free space come first (best
// candidates to fill completely).
14686 m_BlockInfos.resize(blockCount);
14687 for(
size_t i = 0; i < blockCount; ++i)
14689 m_BlockInfos[i].origBlockIndex = i;
14692 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
14693 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
14694 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Tracks gaps skipped over so later (smaller) allocations can fill them.
14699 FreeSpaceDatabase freeSpaceDb;
14701 size_t dstBlockInfoIndex = 0;
14702 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14703 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14704 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14705 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
14706 VkDeviceSize dstOffset = 0;
14709 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
14711 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
14712 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
14713 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
14714 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
14715 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
14717 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
14718 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
14719 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Stop the sweep once either budget would be exceeded.
14720 if(m_AllocationsMoved == maxAllocationsToMove ||
14721 m_BytesMoved + srcAllocSize > maxBytesToMove)
14726 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
14728 VmaDefragmentationMove move = {};
// Case 1: a previously recorded gap can hold this allocation.
14730 size_t freeSpaceInfoIndex;
14731 VkDeviceSize dstAllocOffset;
14732 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
14733 freeSpaceInfoIndex, dstAllocOffset))
14735 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
14736 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
14737 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Same block: just shift the suballocation to the lower offset.
14740 if(freeSpaceInfoIndex == srcBlockInfoIndex)
14742 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14746 VmaSuballocation suballoc = *srcSuballocIt;
14747 suballoc.offset = dstAllocOffset;
14748 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
14749 m_BytesMoved += srcAllocSize;
14750 ++m_AllocationsMoved;
14752 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14754 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14755 srcSuballocIt = nextSuballocIt;
14757 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14759 move.srcBlockIndex = srcOrigBlockIndex;
14760 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14761 move.srcOffset = srcAllocOffset;
14762 move.dstOffset = dstAllocOffset;
14763 move.size = srcAllocSize;
14765 moves.push_back(move);
// Different (earlier) block: rebind the allocation to the gap's block.
14772 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
14774 VmaSuballocation suballoc = *srcSuballocIt;
14775 suballoc.offset = dstAllocOffset;
14776 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
14777 m_BytesMoved += srcAllocSize;
14778 ++m_AllocationsMoved;
14780 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14782 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14783 srcSuballocIt = nextSuballocIt;
14785 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14787 move.srcBlockIndex = srcOrigBlockIndex;
14788 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14789 move.srcOffset = srcAllocOffset;
14790 move.dstOffset = dstAllocOffset;
14791 move.size = srcAllocSize;
14793 moves.push_back(move);
// Case 2: place at the running destination cursor.
14798 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance to the next destination block while this one cannot fit the
// allocation, registering the leftover tail as free space.
14801 while(dstBlockInfoIndex < srcBlockInfoIndex &&
14802 dstAllocOffset + srcAllocSize > dstBlockSize)
14805 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
14807 ++dstBlockInfoIndex;
14808 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14809 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14810 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14811 dstBlockSize = pDstMetadata->GetSize();
14813 dstAllocOffset = 0;
// Same block as source: possibly an overlapping in-place shift.
14817 if(dstBlockInfoIndex == srcBlockInfoIndex)
14819 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14821 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
14823 bool skipOver = overlap;
14824 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: skip the overlapping move unless the shift distance is at
// least 1/64 of the allocation size (small shifts aren't worth it).
14828 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
// Skipped: record the gap before this allocation and leave it in place.
14833 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
14835 dstOffset = srcAllocOffset + srcAllocSize;
// Moved in place within the same block.
14841 srcSuballocIt->offset = dstAllocOffset;
14842 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
14843 dstOffset = dstAllocOffset + srcAllocSize;
14844 m_BytesMoved += srcAllocSize;
14845 ++m_AllocationsMoved;
14848 move.srcBlockIndex = srcOrigBlockIndex;
14849 move.dstBlockIndex = dstOrigBlockIndex;
14850 move.srcOffset = srcAllocOffset;
14851 move.dstOffset = dstAllocOffset;
14852 move.size = srcAllocSize;
14854 moves.push_back(move);
// Destination is an earlier block: rebind across blocks.
14862 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
14863 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
14865 VmaSuballocation suballoc = *srcSuballocIt;
14866 suballoc.offset = dstAllocOffset;
14867 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
14868 dstOffset = dstAllocOffset + srcAllocSize;
14869 m_BytesMoved += srcAllocSize;
14870 ++m_AllocationsMoved;
14872 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14874 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14875 srcSuballocIt = nextSuballocIt;
14877 pDstMetadata->m_Suballocations.push_back(suballoc);
14879 move.srcBlockIndex = srcOrigBlockIndex;
14880 move.dstBlockIndex = dstOrigBlockIndex;
14881 move.srcOffset = srcAllocOffset;
14882 move.dstOffset = dstAllocOffset;
14883 move.size = srcAllocSize;
14885 moves.push_back(move);
14891 m_BlockInfos.clear();
// Rebuild free-suballocation bookkeeping after the bulk edits above.
14893 PostprocessMetadata();
// Strips all FREE suballocations and free-list bookkeeping from every block's
// generic metadata so Defragment() can manipulate suballocation lists
// directly; PostprocessMetadata() rebuilds the free entries afterwards.
14898 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
14900 const size_t blockCount = m_pBlockVector->GetBlockCount();
14901 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14903 VmaBlockMetadata_Generic*
const pMetadata =
14904 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14905 pMetadata->m_FreeCount = 0;
14906 pMetadata->m_SumFreeSize = pMetadata->GetSize();
14907 pMetadata->m_FreeSuballocationsBySize.clear();
14908 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14909 it != pMetadata->m_Suballocations.end(); )
14911 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
14913 VmaSuballocationList::iterator nextIt = it;
14915 pMetadata->m_Suballocations.erase(it);
// Inverse of PreprocessMetadata(): walks each block's (now FREE-entry-less)
// suballocation list and re-inserts FREE suballocations for every gap —
// between entries and at the block tail — rebuilding m_FreeCount,
// m_SumFreeSize and the size-sorted free list, then validates the metadata.
14926 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
14928 const size_t blockCount = m_pBlockVector->GetBlockCount();
14929 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14931 VmaBlockMetadata_Generic*
const pMetadata =
14932 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14933 const VkDeviceSize blockSize = pMetadata->GetSize();
// Completely empty block: one FREE suballocation spanning the whole block.
14936 if(pMetadata->m_Suballocations.empty())
14938 pMetadata->m_FreeCount = 1;
14940 VmaSuballocation suballoc = {
14944 VMA_SUBALLOCATION_TYPE_FREE };
14945 pMetadata->m_Suballocations.push_back(suballoc);
14946 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Non-empty block: insert a FREE entry before each gap between used entries.
14951 VkDeviceSize offset = 0;
14952 VmaSuballocationList::iterator it;
14953 for(it = pMetadata->m_Suballocations.begin();
14954 it != pMetadata->m_Suballocations.end();
14957 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
14958 VMA_ASSERT(it->offset >= offset);
14961 if(it->offset > offset)
14963 ++pMetadata->m_FreeCount;
14964 const VkDeviceSize freeSize = it->offset - offset;
14965 VmaSuballocation suballoc = {
14969 VMA_SUBALLOCATION_TYPE_FREE };
14970 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
14971 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14973 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
14977 pMetadata->m_SumFreeSize -= it->size;
14978 offset = it->offset + it->size;
// Trailing gap at the end of the block.
// NOTE(review): this branch uses '>' against the registration threshold while
// the gap branch above uses '>=' — looks inconsistent in the original; confirm
// against upstream before changing.
14982 if(offset < blockSize)
14984 ++pMetadata->m_FreeCount;
14985 const VkDeviceSize freeSize = blockSize - offset;
14986 VmaSuballocation suballoc = {
14990 VMA_SUBALLOCATION_TYPE_FREE };
14991 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
14992 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
14993 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14995 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
15000 pMetadata->m_FreeSuballocationsBySize.begin(),
15001 pMetadata->m_FreeSuballocationsBySize.end(),
15002 VmaSuballocationItemSizeLess());
15005 VMA_HEAVY_ASSERT(pMetadata->Validate());
// Inserts `suballoc` into the metadata's suballocation list keeping it
// ordered by offset (linear scan for the first entry not below it).
15009 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
15012 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
15013 while(it != pMetadata->m_Suballocations.end())
15015 if(it->offset < suballoc.offset)
15020 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context: holds the pending move list,
// processed/committed counters, the chosen algorithm instance (created later
// in Begin()), and the explicitly-added allocations.
15026 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
15029 VmaBlockVector* pBlockVector,
15030 uint32_t currFrameIndex) :
15032 mutexLocked(false),
15033 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
15034 defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
15035 defragmentationMovesProcessed(0),
15036 defragmentationMovesCommitted(0),
15037 hasDefragmentationPlan(0),
15038 m_hAllocator(hAllocator),
15039 m_hCustomPool(hCustomPool),
15040 m_pBlockVector(pBlockVector),
15041 m_CurrFrameIndex(currFrameIndex),
15042 m_pAlgorithm(VMA_NULL),
15043 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
15044 m_AllAllocations(false)
// Destroys the algorithm instance allocated in Begin() (vma_delete on
// VMA_NULL is expected to be a no-op when Begin() was never called).
15048 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
15050 vma_delete(m_hAllocator, m_pAlgorithm);
// Queues one allocation (with its optional per-allocation "changed" output
// flag) for the algorithm created later in Begin().
15053 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
15055 AllocInfo info = { hAlloc, pChanged };
15056 m_Allocations.push_back(info);
// Selects and instantiates the defragmentation algorithm, then feeds it the
// allocation set. The Fast algorithm is only eligible when every allocation
// participates, no debug margin is set, and no buffer/image granularity
// conflict is possible; otherwise the Generic algorithm is used.
15059 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported,
VmaDefragmentationFlags flags)
15061 const bool allAllocations = m_AllAllocations ||
15062 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
15075 if(VMA_DEBUG_MARGIN == 0 &&
15077 !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
15080 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
15081 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
15085 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
15086 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
15091 m_pAlgorithm->AddAll();
15095 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
15097 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context spanning all pools: zeroes the per-memory-
// type default-pool context table and prepares the custom-pool context vector.
15105 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
15107 uint32_t currFrameIndex,
15110 m_hAllocator(hAllocator),
15111 m_CurrFrameIndex(currFrameIndex),
15114 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
15116 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Finalizes and frees every per-block-vector context — custom-pool contexts
// first, then the (sparsely populated) default-pool contexts — calling
// DefragmentationEnd on each to unlock/clean up before deletion.
15119 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
15121 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15123 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
15124 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
15125 vma_delete(m_hAllocator, pBlockVectorCtx);
15127 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
15129 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
15130 if(pBlockVectorCtx)
15132 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
15133 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation. Pools using a non-default
// algorithm (GetAlgorithm() != 0) are skipped. Reuses an existing per-pool
// context when one was already created (reverse linear search), otherwise
// creates one; then marks it to include all of the pool's allocations.
15138 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
const VmaPool* pPools)
15140 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15142 VmaPool pool = pPools[poolIndex];
15145 if(pool->m_BlockVector.GetAlgorithm() == 0)
15147 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
15149 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15151 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
15153 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
15158 if(!pBlockVectorDefragCtx)
15160 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15163 &pool->m_BlockVector,
15165 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
15168 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Only block-type,
// non-lost allocations qualify. Each allocation is routed to the context of
// its owning custom pool (created on demand, default-algorithm pools only) or
// to the per-memory-type default-pool context, together with the matching
// entry of pAllocationsChanged when the caller provided that array.
15173 void VmaDefragmentationContext_T::AddAllocations(
15174 uint32_t allocationCount,
15176 VkBool32* pAllocationsChanged)
15179 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
15182 VMA_ASSERT(hAlloc);
15184 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
15186 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
15188 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
15190 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation belongs to a custom pool.
15192 if(hAllocPool != VK_NULL_HANDLE)
15195 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
15197 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15199 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
15201 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
15205 if(!pBlockVectorDefragCtx)
15207 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15210 &hAllocPool->m_BlockVector,
15212 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to a default pool, keyed by memory type index.
15219 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
15220 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
15221 if(!pBlockVectorDefragCtx)
15223 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15226 m_hAllocator->m_pBlockVectors[memTypeIndex],
15228 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
15232 if(pBlockVectorDefragCtx)
15234 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
15235 &pAllocationsChanged[allocIndex] : VMA_NULL;
15236 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Single-shot defragmentation entry: stores the CPU/GPU budgets, returns
// VK_NOT_READY when all budgets are zero, drops the GPU budget when no command
// buffer was supplied, then runs Defragment on every default-pool context and
// every custom-pool context, stopping on the first failing result.
15242 VkResult VmaDefragmentationContext_T::Defragment(
15243 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
15244 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
15256 m_MaxCpuBytesToMove = maxCpuBytesToMove;
15257 m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
15259 m_MaxGpuBytesToMove = maxGpuBytesToMove;
15260 m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;
15262 if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
15263 m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
15266 return VK_NOT_READY;
// Without a command buffer there is nothing to record GPU copies into.
15269 if(commandBuffer == VK_NULL_HANDLE)
15271 maxGpuBytesToMove = 0;
15272 maxGpuAllocationsToMove = 0;
15275 VkResult res = VK_SUCCESS;
// Default pools (one per memory type).
15278 for(uint32_t memTypeIndex = 0;
15279 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
15282 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15283 if(pBlockVectorCtx)
15285 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15286 pBlockVectorCtx->GetBlockVector()->Defragment(
15289 maxCpuBytesToMove, maxCpuAllocationsToMove,
15290 maxGpuBytesToMove, maxGpuAllocationsToMove,
15292 if(pBlockVectorCtx->res != VK_SUCCESS)
15294 res = pBlockVectorCtx->res;
// Custom pools.
15300 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15301 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
15304 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15305 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15306 pBlockVectorCtx->GetBlockVector()->Defragment(
15309 maxCpuBytesToMove, maxCpuAllocationsToMove,
15310 maxGpuBytesToMove, maxGpuAllocationsToMove,
15312 if(pBlockVectorCtx->res != VK_SUCCESS)
15314 res = pBlockVectorCtx->res;
// NOTE(review): the enclosing function signature is not visible in this
// excerpt — by the operations below (lazily building each context's
// defragmentation plan, then draining moves into pCurrentMove/movesLeft via
// ProcessDefragmentations) this is the body of DefragmentPassBegin; confirm
// against the surrounding lines.
// Default pools: build the plan on first use, then emit pending moves.
15327 for(uint32_t memTypeIndex = 0;
15328 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15331 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15332 if(pBlockVectorCtx)
15334 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15336 if(!pBlockVectorCtx->hasDefragmentationPlan)
15338 pBlockVectorCtx->GetBlockVector()->Defragment(
15341 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15342 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
15345 if(pBlockVectorCtx->res < VK_SUCCESS)
15348 pBlockVectorCtx->hasDefragmentationPlan =
true;
15351 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15353 pCurrentMove, movesLeft);
15355 movesLeft -= processed;
15356 pCurrentMove += processed;
// Custom pools: same lazy-plan + drain pattern.
15361 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15362 customCtxIndex < customCtxCount;
15365 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15366 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15368 if(!pBlockVectorCtx->hasDefragmentationPlan)
15370 pBlockVectorCtx->GetBlockVector()->Defragment(
15373 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15374 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
15377 if(pBlockVectorCtx->res < VK_SUCCESS)
15380 pBlockVectorCtx->hasDefragmentationPlan =
true;
15383 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15385 pCurrentMove, movesLeft);
15387 movesLeft -= processed;
15388 pCurrentMove += processed;
// Ends an incremental defragmentation pass: commits processed moves in every
// context (default pools, then custom pools). Returns VK_NOT_READY when any
// context has no plan yet or still has uncommitted moves; VK_SUCCESS otherwise.
15395 VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
15397 VkResult res = VK_SUCCESS;
15400 for(uint32_t memTypeIndex = 0;
15401 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15404 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15405 if(pBlockVectorCtx)
15407 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15409 if(!pBlockVectorCtx->hasDefragmentationPlan)
15411 res = VK_NOT_READY;
15415 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15416 pBlockVectorCtx, m_pStats);
15418 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15419 res = VK_NOT_READY;
15424 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15425 customCtxIndex < customCtxCount;
15428 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15429 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15431 if(!pBlockVectorCtx->hasDefragmentationPlan)
15433 res = VK_NOT_READY;
15437 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15438 pBlockVectorCtx, m_pStats);
15440 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15441 res = VK_NOT_READY;
15450 #if VMA_RECORDING_ENABLED
// Recorder constructor: captures the recording start timestamp used to
// compute relative call times in the CSV trace.
15452 VmaRecorder::VmaRecorder() :
15456 m_RecordingStartTime(std::chrono::high_resolution_clock::now())
// NOTE(review): the enclosing function signature is not visible in this
// excerpt — by its body (stores settings, opens settings.pFilePath for binary
// write, emits the CSV header "Vulkan Memory Allocator,Calls recording" and
// format version "1,8") this is VmaRecorder::Init; confirm against the
// surrounding lines. Uses fopen_s on Windows and fopen elsewhere, returning
// VK_ERROR_INITIALIZATION_FAILED when the file cannot be opened.
15462 m_UseMutex = useMutex;
15463 m_Flags = settings.
flags;
15465 #if defined(_WIN32)
15467 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
15471 return VK_ERROR_INITIALIZATION_FAILED;
15475 m_File = fopen(settings.
pFilePath,
"wb");
15479 return VK_ERROR_INITIALIZATION_FAILED;
15484 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
15485 fprintf(m_File,
"%s\n",
"1,8");
// Destructor: closes the trace file if one was opened.
15490 VmaRecorder::~VmaRecorder()
15492 if(m_File != VMA_NULL)
// Appends a vmaCreateAllocator CSV record (thread id, timestamp, frame index)
// to the trace file under the file mutex.
15498 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
15500 CallParams callParams;
15501 GetBasicParams(callParams);
15503 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15504 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Appends a vmaDestroyAllocator CSV record to the trace file under the file mutex.
15508 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
15510 CallParams callParams;
15511 GetBasicParams(callParams);
15513 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15514 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// NOTE(review): the enclosing function signature is not visible in this
// excerpt — the format string below identifies it as RecordCreatePool;
// confirm against the surrounding lines. Appends a vmaCreatePool CSV record
// (pool create-info fields and handle) under the file mutex.
15520 CallParams callParams;
15521 GetBasicParams(callParams);
15523 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15524 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaDestroyPool CSV record (pool handle) under the file mutex.
15535 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
15537 CallParams callParams;
15538 GetBasicParams(callParams);
15540 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15541 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaAllocateMemory CSV record: memory requirements, create-info
// fields, and the user-data string, under the file mutex.
15546 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
15547 const VkMemoryRequirements& vkMemReq,
15551 CallParams callParams;
15552 GetBasicParams(callParams);
15554 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15555 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15556 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15558 vkMemReq.alignment,
15559 vkMemReq.memoryTypeBits,
15567 userDataStr.GetString());
// Appends a vmaAllocateMemoryPages CSV record: shared memory requirements and
// create-info fields, followed by the list of allocation handles and the
// user-data string, under the file mutex.
15571 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
15572 const VkMemoryRequirements& vkMemReq,
15574 uint64_t allocationCount,
15577 CallParams callParams;
15578 GetBasicParams(callParams);
15580 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15581 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15582 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
15584 vkMemReq.alignment,
15585 vkMemReq.memoryTypeBits,
15592 PrintPointerList(allocationCount, pAllocations);
15593 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Appends a vmaAllocateMemoryForBuffer CSV record, including the dedicated-
// allocation requirement/preference flags, under the file mutex.
15597 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
15598 const VkMemoryRequirements& vkMemReq,
15599 bool requiresDedicatedAllocation,
15600 bool prefersDedicatedAllocation,
15604 CallParams callParams;
15605 GetBasicParams(callParams);
15607 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15608 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15609 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15611 vkMemReq.alignment,
15612 vkMemReq.memoryTypeBits,
15613 requiresDedicatedAllocation ? 1 : 0,
15614 prefersDedicatedAllocation ? 1 : 0,
15622 userDataStr.GetString());
15626 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
15627 const VkMemoryRequirements& vkMemReq,
15628 bool requiresDedicatedAllocation,
15629 bool prefersDedicatedAllocation,
15633 CallParams callParams;
15634 GetBasicParams(callParams);
15636 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15637 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15638 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15640 vkMemReq.alignment,
15641 vkMemReq.memoryTypeBits,
15642 requiresDedicatedAllocation ? 1 : 0,
15643 prefersDedicatedAllocation ? 1 : 0,
15651 userDataStr.GetString());
// The following recorders all share one shape: capture thread id + timestamp
// via GetBasicParams, take m_FileMutex (when m_UseMutex), and append a single
// CSV line to m_File. Code kept byte-identical; comments only.
15655 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
15658 CallParams callParams;
15659 GetBasicParams(callParams);
15661 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15662 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Frees a batch: the pointer list is printed space-separated by
// PrintPointerList between the prefix fprintf and the trailing newline.
15667 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
15668 uint64_t allocationCount,
15671 CallParams callParams;
15672 GetBasicParams(callParams);
15674 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15675 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
15676 PrintPointerList(allocationCount, pAllocations);
15677 fprintf(m_File,
"\n");
// Records vmaSetAllocationUserData; pUserData is formatted via UserDataString.
15681 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
15683 const void* pUserData)
15685 CallParams callParams;
15686 GetBasicParams(callParams);
15688 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15689 UserDataString userDataStr(
15692 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15694 userDataStr.GetString());
15698 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
15701 CallParams callParams;
15702 GetBasicParams(callParams);
15704 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15705 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
15710 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
15713 CallParams callParams;
15714 GetBasicParams(callParams);
15716 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15717 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
15722 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
15725 CallParams callParams;
15726 GetBasicParams(callParams);
15728 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15729 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Flush/Invalidate additionally log the byte offset and size columns (%llu
// assumes VkDeviceSize is unsigned long long — true for the Vulkan ABI).
15734 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
15735 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15737 CallParams callParams;
15738 GetBasicParams(callParams);
15740 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15741 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
15748 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
15749 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15751 CallParams callParams;
15752 GetBasicParams(callParams);
15754 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15755 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaCreateBuffer call: logs the VkBufferCreateInfo fields followed
// by the VmaAllocationCreateInfo fields and the formatted user data.
// NOTE(review): lossy extraction — several argument lines between the visible
// ones were dropped (see jumps in embedded numbering).
15762 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
15763 const VkBufferCreateInfo& bufCreateInfo,
15767 CallParams callParams;
15768 GetBasicParams(callParams);
15770 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15771 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15772 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15773 bufCreateInfo.flags,
15774 bufCreateInfo.size,
15775 bufCreateInfo.usage,
15776 bufCreateInfo.sharingMode,
15777 allocCreateInfo.
flags,
15778 allocCreateInfo.
usage,
15782 allocCreateInfo.
pool,
15784 userDataStr.GetString());
// Records a vmaCreateImage call: logs the full VkImageCreateInfo (type,
// format, extent, mips, layers, samples, tiling, usage, sharing, layout)
// followed by the allocation-create-info columns.
15788 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
15789 const VkImageCreateInfo& imageCreateInfo,
15793 CallParams callParams;
15794 GetBasicParams(callParams);
15796 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15797 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15798 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15799 imageCreateInfo.flags,
15800 imageCreateInfo.imageType,
15801 imageCreateInfo.format,
15802 imageCreateInfo.extent.width,
15803 imageCreateInfo.extent.height,
15804 imageCreateInfo.extent.depth,
15805 imageCreateInfo.mipLevels,
15806 imageCreateInfo.arrayLayers,
15807 imageCreateInfo.samples,
15808 imageCreateInfo.tiling,
15809 imageCreateInfo.usage,
15810 imageCreateInfo.sharingMode,
15811 imageCreateInfo.initialLayout,
15812 allocCreateInfo.
flags,
15813 allocCreateInfo.
usage,
15817 allocCreateInfo.
pool,
15819 userDataStr.GetString());
// Simple single-handle recorders: one CSV line with the object pointer,
// written under m_FileMutex. Code kept byte-identical; comments only.
15823 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
15826 CallParams callParams;
15827 GetBasicParams(callParams);
15829 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15830 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
15835 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
15838 CallParams callParams;
15839 GetBasicParams(callParams);
15841 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15842 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
15847 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
15850 CallParams callParams;
15851 GetBasicParams(callParams);
15853 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15854 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
15859 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
15862 CallParams callParams;
15863 GetBasicParams(callParams);
15865 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15866 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
15871 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
15874 CallParams callParams;
15875 GetBasicParams(callParams);
15877 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15878 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// DefragmentationBegin emits its line in three fprintf calls, with pointer
// lists printed between them (the list-printing lines were elided here).
15883 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
15887 CallParams callParams;
15888 GetBasicParams(callParams);
15890 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15891 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
15894 fprintf(m_File,
",");
15896 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
15906 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
15909 CallParams callParams;
15910 GetBasicParams(callParams);
15912 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15913 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// A null pool name is recorded as an empty column rather than "(null)".
15918 void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
15922 CallParams callParams;
15923 GetBasicParams(callParams);
15925 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15926 fprintf(m_File,
"%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15927 pool, name != VMA_NULL ? name :
"");
// UserDataString constructor fragment (signature elided by extraction):
// when the string-user-data flag is set, pUserData is used directly as a
// C string; otherwise the pointer value is formatted into m_PtrStr.
// 17 bytes fits "0x" + 14 hex digits, or 16 digits + NUL on most platforms;
// snprintf truncates safely and always NUL-terminates.
15933 if(pUserData != VMA_NULL)
15937 m_Str = (
const char*)pUserData;
15942 snprintf(m_PtrStr, 17,
"%p", pUserData);
// Writes the "Config,Begin" .. "Config,End" header section of the recording
// file: Vulkan API version, physical-device identity and limits, the full
// memory heap/type tables, which relevant extensions were enabled, and the
// values of the VMA_* compile-time macros that affect playback fidelity.
// Pure sequential fprintf; no locking here (callers visible elsewhere).
15952 void VmaRecorder::WriteConfiguration(
15953 const VkPhysicalDeviceProperties& devProps,
15954 const VkPhysicalDeviceMemoryProperties& memProps,
15955 uint32_t vulkanApiVersion,
15956 bool dedicatedAllocationExtensionEnabled,
15957 bool bindMemory2ExtensionEnabled,
15958 bool memoryBudgetExtensionEnabled,
15959 bool deviceCoherentMemoryExtensionEnabled)
15961 fprintf(m_File,
"Config,Begin\n");
15963 fprintf(m_File,
"VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));
// Physical device identity.
15965 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
15966 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
15967 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
15968 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
15969 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
15970 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits that influence allocation behavior.
15972 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
15973 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
15974 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heap table.
15976 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
15977 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
15979 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
15980 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Memory type table.
15982 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
15983 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
15985 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
15986 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
// Extension enablement flags, as 0/1.
15989 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
15990 fprintf(m_File,
"Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
15991 fprintf(m_File,
"Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
15992 fprintf(m_File,
"Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);
// Compile-time configuration macros baked into this build.
15994 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
15995 fprintf(m_File,
"Macro,VMA_MIN_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_MIN_ALIGNMENT);
15996 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
15997 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
15998 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
15999 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
16000 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
16001 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
16002 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
16004 fprintf(m_File,
"Config,End\n");
// Fills outParams with the calling thread's id and the elapsed time (seconds,
// double) since m_RecordingStartTime. On Win32 the native thread id is used;
// the portable branch round-trips std::thread::id through a stringstream.
// NOTE(review): std::stoi can throw/overflow if the printed thread id exceeds
// INT_MAX (possible on some platforms) — worth hardening upstream.
// NOTE(review): the #else / #endif lines between the two branches were elided
// by the extraction; code below is kept byte-identical.
16007 void VmaRecorder::GetBasicParams(CallParams& outParams)
16009 #if defined(_WIN32)
16010 outParams.threadId = GetCurrentThreadId();
16015 std::thread::id thread_id = std::this_thread::get_id();
16016 std::stringstream thread_id_to_string_converter;
16017 thread_id_to_string_converter << thread_id;
16018 std::string thread_id_as_string = thread_id_to_string_converter.str();
16019 outParams.threadId =
static_cast<uint32_t
>(std::stoi(thread_id_as_string.c_str()));
16022 auto current_time = std::chrono::high_resolution_clock::now();
16024 outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
// Prints `count` allocation handles to m_File, space-separated, no trailing
// separator (first item printed alone, subsequent items prefixed with ' ').
// NOTE(review): the guard around the zero-count case was elided by the
// extraction (embedded numbering jumps 16027 -> 16031).
16027 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
16031 fprintf(m_File,
"%p", pItems[0]);
16032 for(uint64_t i = 1; i < count; ++i)
16034 fprintf(m_File,
" %p", pItems[i]);
// Flush: body elided by the extraction; only the signature remains visible.
16039 void VmaRecorder::Flush()
// Thread-safe pool allocator for VmaAllocation_T objects, built on a
// VmaPoolAllocator with 1024 items per block.
16052 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
16053 m_Allocator(pAllocationCallbacks, 1024)
// Allocates one VmaAllocation, perfect-forwarding ctor args, under m_Mutex.
16057 template<
typename... Types>
VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
16059 VmaMutexLock mutexLock(m_Mutex);
16060 return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
// Returns an allocation object to the pool, under the same mutex.
16063 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
16065 VmaMutexLock mutexLock(m_Mutex);
16066 m_Allocator.Free(hAlloc);
// VmaAllocator_T constructor (the signature line and several initializers /
// statements were elided by the extraction; code kept byte-identical).
// Member-initializer list: caller-supplied handles and callbacks, then
// zero/UINT32_MAX defaults for counters and cached bitmasks.
16074 m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
16081 m_hDevice(pCreateInfo->device),
16082 m_hInstance(pCreateInfo->instance),
16083 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
16084 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
16085 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
16086 m_AllocationObjectAllocator(&m_AllocationCallbacks),
16087 m_HeapSizeLimitMask(0),
16088 m_DeviceMemoryCount(0),
16089 m_PreferredLargeHeapBlockSize(0),
16090 m_PhysicalDevice(pCreateInfo->physicalDevice),
16091 m_CurrentFrameIndex(0),
16092 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
16094 m_GlobalMemoryTypeBits(UINT32_MAX)
16096 ,m_pRecorder(VMA_NULL)
// On Vulkan >= 1.1 the KHR extensions are core, so the extension flags are
// cleared — the core entry points are used instead.
16099 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16101 m_UseKhrDedicatedAllocation =
false;
16102 m_UseKhrBindMemory2 =
false;
// Corruption detection writes uint32_t canaries into the debug margin,
// so the margin must be a multiple of 4.
16105 if(VMA_DEBUG_DETECT_CORRUPTION)
16108 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Sanity checks: flags requesting features that were compiled out or that
// require a newer Vulkan than the headers provide trigger asserts.
16113 if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
16115 #if !(VMA_DEDICATED_ALLOCATION)
16118 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
16121 #if !(VMA_BIND_MEMORY2)
16124 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
16128 #if !(VMA_MEMORY_BUDGET)
16131 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
16134 #if !(VMA_BUFFER_DEVICE_ADDRESS)
16135 if(m_UseKhrBufferDeviceAddress)
16137 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
16140 #if VMA_VULKAN_VERSION < 1002000
16141 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
16143 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
16146 #if VMA_VULKAN_VERSION < 1001000
16147 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16149 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
16152 #if !(VMA_MEMORY_PRIORITY)
16153 if(m_UseExtMemoryPriority)
16155 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
// Zero all POD members before querying device properties.
16159 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
16160 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
16161 memset(&m_MemProps, 0,
sizeof(m_MemProps));
16163 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
16164 memset(&m_VulkanFunctions, 0,
sizeof(m_VulkanFunctions));
16166 #if VMA_EXTERNAL_MEMORY
16167 memset(&m_TypeExternalMemoryHandleTypes, 0,
sizeof(m_TypeExternalMemoryHandleTypes));
// Query device/memory properties through the (just-imported) function table.
16179 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
16180 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Alignment-related values must all be powers of two.
16182 VMA_ASSERT(VmaIsPow2(VMA_MIN_ALIGNMENT));
16183 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
16184 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
16185 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
16190 m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
16192 #if VMA_EXTERNAL_MEMORY
16196 sizeof(VkExternalMemoryHandleTypeFlagsKHR) * GetMemoryTypeCount());
// Apply optional per-heap size limits: clamp reported heap sizes and record
// which heaps are limited in m_HeapSizeLimitMask.
16202 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
16204 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
16205 if(limit != VK_WHOLE_SIZE)
16207 m_HeapSizeLimitMask |= 1u << heapIndex;
16208 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
16210 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create one VmaBlockVector per memory type.
16216 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16218 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
16220 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
16224 preferredBlockSize,
16227 GetBufferImageGranularity(),
16232 GetMemoryTypeMinAlignment(memTypeIndex),
16241 VkResult res = VK_SUCCESS;
// Optional call recording: create the recorder, write the configuration
// header, and log the creation; fails hard if recording support was not
// compiled in (VMA_RECORDING_ENABLED == 0).
16246 #if VMA_RECORDING_ENABLED
16247 m_pRecorder = vma_new(
this, VmaRecorder)();
16249 if(res != VK_SUCCESS)
16253 m_pRecorder->WriteConfiguration(
16254 m_PhysicalDeviceProperties,
16256 m_VulkanApiVersion,
16257 m_UseKhrDedicatedAllocation,
16258 m_UseKhrBindMemory2,
16259 m_UseExtMemoryBudget,
16260 m_UseAmdDeviceCoherentMemory);
16261 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
16263 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
16264 return VK_ERROR_FEATURE_NOT_PRESENT;
16268 #if VMA_MEMORY_BUDGET
16269 if(m_UseExtMemoryBudget)
16271 UpdateVulkanBudget();
// Destructor: logs and destroys the recorder (if enabled), asserts that no
// pools or dedicated allocations were leaked by the application, and deletes
// the per-memory-type block vectors in reverse order.
16278 VmaAllocator_T::~VmaAllocator_T()
16280 #if VMA_RECORDING_ENABLED
16281 if(m_pRecorder != VMA_NULL)
16283 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
16284 vma_delete(
this, m_pRecorder);
16288 VMA_ASSERT(m_Pools.IsEmpty());
16290 for(
size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
16292 if(!m_DedicatedAllocations[memTypeIndex].IsEmpty())
16294 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
16297 vma_delete(
this, m_pBlockVectors[memTypeIndex]);
// Populates m_VulkanFunctions in priority order: statically-linked entry
// points first (if enabled), then caller-supplied overrides, then dynamic
// fetch via vkGet*ProcAddr for anything still null; finally validates that
// every required pointer is set.
16301 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
16303 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
16304 ImportVulkanFunctions_Static();
16307 if(pVulkanFunctions != VMA_NULL)
16309 ImportVulkanFunctions_Custom(pVulkanFunctions);
16312 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
16313 ImportVulkanFunctions_Dynamic();
16316 ValidateVulkanFunctions();
16319 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
// Copies the statically-linked Vulkan 1.0 entry points into the function
// table; the Vulkan >= 1.1 core promotions (GetBufferMemoryRequirements2
// etc.) are copied into the *KHR slots when the runtime API version allows.
16321 void VmaAllocator_T::ImportVulkanFunctions_Static()
16324 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
16325 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
16326 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
16327 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
16328 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
16329 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
16330 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
16331 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
16332 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
16333 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
16334 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
16335 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
16336 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
16337 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
16338 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
16339 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
16340 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
16343 #if VMA_VULKAN_VERSION >= 1001000
16344 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16346 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
16347 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
16348 m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
16349 m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
16350 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
// Copies each caller-supplied function pointer into m_VulkanFunctions,
// skipping null entries so earlier (static) imports are not overwritten
// with null. The macro is scoped to this function via #undef.
16357 void VmaAllocator_T::ImportVulkanFunctions_Custom(
const VmaVulkanFunctions* pVulkanFunctions)
16359 VMA_ASSERT(pVulkanFunctions != VMA_NULL);
16361 #define VMA_COPY_IF_NOT_NULL(funcName) \
16362 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
16364 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
16365 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
16366 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
16367 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
16368 VMA_COPY_IF_NOT_NULL(vkMapMemory);
16369 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
16370 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
16371 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
16372 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
16373 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
16374 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
16375 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
16376 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
16377 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
16378 VMA_COPY_IF_NOT_NULL(vkCreateImage);
16379 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
16380 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
// Extension entry points are copied only when the corresponding feature
// was compiled in.
16382 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16383 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
16384 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
16387 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16388 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
16389 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
16392 #if VMA_MEMORY_BUDGET
16393 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
16396 #undef VMA_COPY_IF_NOT_NULL
16399 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
// Fills any still-null slots in m_VulkanFunctions by querying the loader:
// instance-level functions via vkGetInstanceProcAddr(m_hInstance, ...),
// device-level functions via vkGetDeviceProcAddr(m_hDevice, ...). Existing
// non-null pointers (from static or custom import) are left untouched.
16401 void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
16403 #define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
16404 if(m_VulkanFunctions.memberName == VMA_NULL) \
16405 m_VulkanFunctions.memberName = \
16406 (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
16407 #define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
16408 if(m_VulkanFunctions.memberName == VMA_NULL) \
16409 m_VulkanFunctions.memberName = \
16410 (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);
16412 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties,
"vkGetPhysicalDeviceProperties");
16413 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties,
"vkGetPhysicalDeviceMemoryProperties");
16414 VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory,
"vkAllocateMemory");
16415 VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory,
"vkFreeMemory");
16416 VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory,
"vkMapMemory");
16417 VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory,
"vkUnmapMemory");
16418 VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges,
"vkFlushMappedMemoryRanges");
16419 VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges,
"vkInvalidateMappedMemoryRanges");
16420 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory,
"vkBindBufferMemory");
16421 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory,
"vkBindImageMemory");
16422 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements,
"vkGetBufferMemoryRequirements");
16423 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements,
"vkGetImageMemoryRequirements");
16424 VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer,
"vkCreateBuffer");
16425 VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer,
"vkDestroyBuffer");
16426 VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage,
"vkCreateImage");
16427 VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage,
"vkDestroyImage");
16428 VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer,
"vkCmdCopyBuffer");
// On Vulkan >= 1.1, fetch the core (un-suffixed) names into the *KHR slots.
16430 #if VMA_VULKAN_VERSION >= 1001000
16431 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16433 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2,
"vkGetBufferMemoryRequirements2");
16434 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2,
"vkGetImageMemoryRequirements2");
16435 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2,
"vkBindBufferMemory2");
16436 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2,
"vkBindImageMemory2");
16437 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2,
"vkGetPhysicalDeviceMemoryProperties2");
// Otherwise fall back to the KHR-suffixed extension names when the
// corresponding extension was requested.
16441 #if VMA_DEDICATED_ALLOCATION
16442 if(m_UseKhrDedicatedAllocation)
16444 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR,
"vkGetBufferMemoryRequirements2KHR");
16445 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR,
"vkGetImageMemoryRequirements2KHR");
16449 #if VMA_BIND_MEMORY2
16450 if(m_UseKhrBindMemory2)
16452 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR,
"vkBindBufferMemory2KHR");
16453 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR,
"vkBindImageMemory2KHR");
16457 #if VMA_MEMORY_BUDGET
16458 if(m_UseExtMemoryBudget)
16460 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR,
"vkGetPhysicalDeviceMemoryProperties2KHR");
16464 #undef VMA_FETCH_DEVICE_FUNC
16465 #undef VMA_FETCH_INSTANCE_FUNC
// Asserts that every Vulkan entry point VMA needs is non-null after import.
// The Vulkan 1.0 core set is required unconditionally; the *2KHR variants
// only when the runtime API version or the corresponding extension
// (dedicated allocation / bind_memory2 / memory_budget) makes them usable.
16470 void VmaAllocator_T::ValidateVulkanFunctions()
16472 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
16473 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
16474 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
16475 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
16476 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
16477 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
16478 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
16479 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
16480 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
16481 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
16482 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
16483 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
16484 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
16485 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
16486 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
16487 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
16488 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
16490 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16491 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
16493 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
16494 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
16498 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16499 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
16501 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
16502 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
16506 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
16507 if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16509 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
16514 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
16516 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16517 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
16518 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
16519 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
// Allocates memory of one specific memory type: tries the type's block
// vector first unless a dedicated allocation is preferred (debug macro,
// explicit request, or size > half the preferred block size), then falls
// back to a dedicated vkAllocateMemory — unless already near the device's
// maxMemoryAllocationCount (3/4 threshold), in which case it fails early.
// NOTE(review): lossy extraction — several parameter and argument lines
// inside the calls below were elided; code kept byte-identical.
16522 VkResult VmaAllocator_T::AllocateMemoryOfType(
16524 VkDeviceSize alignment,
16525 bool dedicatedAllocation,
16526 VkBuffer dedicatedBuffer,
16527 VkBufferUsageFlags dedicatedBufferUsage,
16528 VkImage dedicatedImage,
16530 uint32_t memTypeIndex,
16531 VmaSuballocationType suballocType,
16532 size_t allocationCount,
16535 VMA_ASSERT(pAllocations != VMA_NULL);
16536 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
16542 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16552 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
16553 VMA_ASSERT(blockVector);
// Prefer dedicated memory for explicitly-requested or large allocations.
16555 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
16556 bool preferDedicatedMemory =
16557 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
16558 dedicatedAllocation ||
16560 size > preferredBlockSize / 2;
16562 if(preferDedicatedMemory &&
16564 finalCreateInfo.
pool == VK_NULL_HANDLE)
16573 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16577 return AllocateDedicatedMemory(
16587 dedicatedBufferUsage,
// Primary path: sub-allocate from the memory type's block vector.
16595 VkResult res = blockVector->Allocate(
16596 m_CurrentFrameIndex.load(),
16603 if(res == VK_SUCCESS)
16611 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: dedicated allocation, but avoid approaching the device's
// allocation-count limit.
16617 if(m_DeviceMemoryCount.load() > m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount * 3 / 4)
16619 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16622 res = AllocateDedicatedMemory(
16632 dedicatedBufferUsage,
16636 if(res == VK_SUCCESS)
16639 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
16645 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// NOTE(review): extraction artifact — interior lines (some parameters, braces,
// loop bodies) are missing; numeric prefixes are leftover line numbers. Code
// left byte-identical. Visible logic: build a VkMemoryAllocateInfo with an
// optional pNext chain (dedicated-allocation info, allocate-flags for buffer
// device address, memory priority, external-memory export), allocate
// `allocationCount` dedicated pages, register them, and roll back all pages
// on partial failure.
16651 VkResult VmaAllocator_T::AllocateDedicatedMemory(
16653 VmaSuballocationType suballocType,
16654 uint32_t memTypeIndex,
16657 bool isUserDataString,
16660 VkBuffer dedicatedBuffer,
16661 VkBufferUsageFlags dedicatedBufferUsage,
16662 VkImage dedicatedImage,
16663 size_t allocationCount,
16666 VMA_ASSERT(allocationCount > 0 && pAllocations);
// Budget pre-check: refuse the allocation if it would exceed the heap budget.
16670 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16672 GetBudget(&heapBudget, heapIndex, 1);
16673 if(heapBudget.
usage + size * allocationCount > heapBudget.
budget)
16675 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16679 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
16680 allocInfo.memoryTypeIndex = memTypeIndex;
16681 allocInfo.allocationSize = size;
16683 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16684 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
16685 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16687 if(dedicatedBuffer != VK_NULL_HANDLE)
16689 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
16690 dedicatedAllocInfo.buffer = dedicatedBuffer;
16691 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
16693 else if(dedicatedImage != VK_NULL_HANDLE)
16695 dedicatedAllocInfo.image = dedicatedImage;
16696 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
16701 #if VMA_BUFFER_DEVICE_ADDRESS
16702 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
16703 if(m_UseKhrBufferDeviceAddress)
// UINT32_MAX usage means "unknown" — conservatively assume the memory may
// back a buffer with device address.
16705 bool canContainBufferWithDeviceAddress =
true;
16706 if(dedicatedBuffer != VK_NULL_HANDLE)
16708 canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX ||
16709 (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
16711 else if(dedicatedImage != VK_NULL_HANDLE)
16713 canContainBufferWithDeviceAddress =
false;
16715 if(canContainBufferWithDeviceAddress)
16717 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
16718 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
16723 #if VMA_MEMORY_PRIORITY
16724 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
16725 if(m_UseExtMemoryPriority)
16727 priorityInfo.priority = priority;
16728 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
16732 #if VMA_EXTERNAL_MEMORY
16734 VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR };
16735 exportMemoryAllocInfo.handleTypes = GetExternalMemoryHandleTypeFlags(memTypeIndex);
16736 if(exportMemoryAllocInfo.handleTypes != 0)
16738 VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo);
16743 VkResult res = VK_SUCCESS;
16744 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16746 res = AllocateDedicatedMemoryPage(
16754 pAllocations + allocIndex);
16755 if(res != VK_SUCCESS)
16761 if(res == VK_SUCCESS)
// Register all successfully allocated pages in the per-type dedicated list.
16765 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
16766 DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
16767 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16769 dedicatedAllocations.PushBack(pAllocations[allocIndex]);
16773 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure path: free every page allocated so far, in reverse order.
16778 while(allocIndex--)
16781 VkDeviceMemory hMemory = currAlloc->GetMemory();
16793 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
16794 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
16795 currAlloc->SetUserData(
this, VMA_NULL);
16796 m_AllocationObjectAllocator.Free(currAlloc);
16799 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// NOTE(review): extraction artifact — some parameters and braces are missing;
// numeric prefixes are leftover line numbers. Code left byte-identical.
// Visible logic: allocate one VkDeviceMemory, optionally map it, then wrap it
// in a VmaAllocation_T initialized as a dedicated allocation and record it in
// the budget.
16805 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
16807 VmaSuballocationType suballocType,
16808 uint32_t memTypeIndex,
16809 const VkMemoryAllocateInfo& allocInfo,
16811 bool isUserDataString,
16815 VkDeviceMemory hMemory = VK_NULL_HANDLE;
16816 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
16819 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
16823 void* pMappedData = VMA_NULL;
16826 res = (*m_VulkanFunctions.vkMapMemory)(
// On mapping failure the freshly allocated memory is released again.
16835 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
16836 FreeVulkanMemory(memTypeIndex, size, hMemory);
16841 *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
16842 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
16843 (*pAllocation)->SetUserData(
this, pUserData);
16844 m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
16845 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
16847 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
16853 void VmaAllocator_T::GetBufferMemoryRequirements(
16855 VkMemoryRequirements& memReq,
16856 bool& requiresDedicatedAllocation,
16857 bool& prefersDedicatedAllocation)
const
16859 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16860 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16862 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
16863 memReqInfo.buffer = hBuffer;
16865 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16867 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16868 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16870 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16872 memReq = memReq2.memoryRequirements;
16873 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16874 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
16879 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
16880 requiresDedicatedAllocation =
false;
16881 prefersDedicatedAllocation =
false;
16885 void VmaAllocator_T::GetImageMemoryRequirements(
16887 VkMemoryRequirements& memReq,
16888 bool& requiresDedicatedAllocation,
16889 bool& prefersDedicatedAllocation)
const
16891 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16892 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16894 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
16895 memReqInfo.image = hImage;
16897 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16899 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16900 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16902 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16904 memReq = memReq2.memoryRequirements;
16905 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16906 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
16911 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
16912 requiresDedicatedAllocation =
false;
16913 prefersDedicatedAllocation =
false;
// NOTE(review): extraction artifact — many interior lines (createInfo
// parameter, flag checks, braces) are missing; numeric prefixes are leftover
// line numbers. Code left byte-identical. Visible logic: validate the request
// and flag combinations, route pool allocations to the pool's block vector,
// otherwise try successive memory types (clearing each failed type from
// memoryTypeBits) via AllocateMemoryOfType.
16917 VkResult VmaAllocator_T::AllocateMemory(
16918 const VkMemoryRequirements& vkMemReq,
16919 bool requiresDedicatedAllocation,
16920 bool prefersDedicatedAllocation,
16921 VkBuffer dedicatedBuffer,
16922 VkBufferUsageFlags dedicatedBufferUsage,
16923 VkImage dedicatedImage,
16925 VmaSuballocationType suballocType,
16926 size_t allocationCount,
16929 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
16931 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
16933 if(vkMemReq.size == 0)
16935 return VK_ERROR_VALIDATION_FAILED_EXT;
// The following asserts reject mutually exclusive create-flag combinations.
16940 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
16941 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16946 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
16947 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16949 if(requiresDedicatedAllocation)
16953 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
16954 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16956 if(createInfo.
pool != VK_NULL_HANDLE)
16958 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
16959 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16962 if((createInfo.
pool != VK_NULL_HANDLE) &&
16965 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
16966 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: delegate directly to the pool's block vector.
16969 if(createInfo.
pool != VK_NULL_HANDLE)
16974 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16979 return createInfo.
pool->m_BlockVector.Allocate(
16980 m_CurrentFrameIndex.load(),
16982 vkMemReq.alignment,
// Default path: iterate candidate memory types, masking out each one that
// fails, until an allocation succeeds or no types remain.
16991 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
16992 uint32_t memTypeIndex = UINT32_MAX;
16994 if(res == VK_SUCCESS)
16996 res = AllocateMemoryOfType(
16998 vkMemReq.alignment,
16999 requiresDedicatedAllocation || prefersDedicatedAllocation,
17001 dedicatedBufferUsage,
17009 if(res == VK_SUCCESS)
17019 memoryTypeBits &= ~(1u << memTypeIndex);
17022 if(res == VK_SUCCESS)
17024 res = AllocateMemoryOfType(
17026 vkMemReq.alignment,
17027 requiresDedicatedAllocation || prefersDedicatedAllocation,
17029 dedicatedBufferUsage,
17037 if(res == VK_SUCCESS)
17047 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// NOTE(review): extraction artifact — braces and a few statements are missing;
// numeric prefixes are leftover line numbers. Code left byte-identical.
// Visible logic: free each non-null allocation in reverse order, optionally
// filling it with a destroy pattern first, dispatching on allocation type
// (block suballocation vs dedicated), then update the budget and release the
// allocation object.
17058 void VmaAllocator_T::FreeMemory(
17059 size_t allocationCount,
17062 VMA_ASSERT(pAllocations);
17064 for(
size_t allocIndex = allocationCount; allocIndex--; )
17068 if(allocation != VK_NULL_HANDLE)
// TouchAllocation presumably filters out lost allocations — confirm upstream.
17070 if(TouchAllocation(allocation))
17072 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
17074 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
17077 switch(allocation->GetType())
17079 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17081 VmaBlockVector* pBlockVector = VMA_NULL;
17082 VmaPool hPool = allocation->GetBlock()->GetParentPool();
17083 if(hPool != VK_NULL_HANDLE)
17085 pBlockVector = &hPool->m_BlockVector;
17089 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17090 pBlockVector = m_pBlockVectors[memTypeIndex];
17092 pBlockVector->Free(allocation);
17095 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17096 FreeDedicatedMemory(allocation);
17104 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
17105 allocation->SetUserData(
this, VMA_NULL);
17106 m_AllocationObjectAllocator.Free(allocation);
// NOTE(review): extraction artifact — braces and init lines are missing;
// numeric prefixes are leftover line numbers. Code left byte-identical.
// Visible logic: zero all stat infos, accumulate stats from default block
// vectors, custom pools, and dedicated allocations, then postprocess totals
// and per-type/per-heap entries.
17111 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
17114 InitStatInfo(pStats->
total);
17115 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
17117 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default per-memory-type block vectors.
17121 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17123 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
17124 VMA_ASSERT(pBlockVector);
17125 pBlockVector->AddStats(pStats);
// Custom pools, walked under a read lock.
17130 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
17131 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
17133 pool->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type, under the per-type read lock.
17138 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17140 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
17141 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17142 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
17144 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
17147 alloc->DedicatedAllocCalcStatsInfo(allocationStatInfo);
17148 VmaAddStatInfo(pStats->
total, allocationStatInfo);
17149 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
17150 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/mins/maxes from the accumulated sums.
17155 VmaPostprocessCalcStatInfo(pStats->
total);
17156 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
17157 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
17158 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
17159 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// NOTE(review): extraction artifact — braces and some assignments (e.g. the
// allocation-bytes fields) are missing; numeric prefixes are leftover line
// numbers. Code left byte-identical. Visible logic: when VK_EXT_memory_budget
// data is fresh (< 30 operations since fetch), combine cached driver numbers
// with local block-byte deltas; otherwise refresh the budget and recurse.
// Without the extension, estimate budget as 80% of the heap size.
17162 void VmaAllocator_T::GetBudget(
VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
17164 #if VMA_MEMORY_BUDGET
17165 if(m_UseExtMemoryBudget)
17167 if(m_Budget.m_OperationsSinceBudgetFetch < 30)
17169 VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
17170 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
17172 const uint32_t heapIndex = firstHeap + i;
17174 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
// Add bytes allocated since the last budget fetch on top of the driver-
// reported usage; clamp at zero otherwise.
17177 if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->
blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
17179 outBudget->
usage = m_Budget.m_VulkanUsage[heapIndex] +
17180 outBudget->
blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
17184 outBudget->
usage = 0;
17188 outBudget->
budget = VMA_MIN(
17189 m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
17194 UpdateVulkanBudget();
17195 GetBudget(outBudget, firstHeap, heapCount);
// Fallback path (extension unavailable): local counters only.
17201 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
17203 const uint32_t heapIndex = firstHeap + i;
17205 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
17209 outBudget->
budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
// PCI vendor ID of AMD: 4098 == 0x1002.
17214 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// NOTE(review): extraction artifact — parameters and several lines are
// missing; numeric prefixes are leftover line numbers. Code left
// byte-identical. Visible logic: create a defragmentation context, feed it the
// allocations, run Defragment(); the context is destroyed immediately unless
// the result is VK_NOT_READY (i.e. incremental passes remain).
17216 VkResult VmaAllocator_T::DefragmentationBegin(
17226 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
17227 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
17230 (*pContext)->AddAllocations(
17233 VkResult res = (*pContext)->Defragment(
17238 if(res != VK_NOT_READY)
17240 vma_delete(
this, *pContext);
17241 *pContext = VMA_NULL;
// NOTE(review): extraction artifact — parameter lists and return statements
// are missing for these three small functions; numeric prefixes are leftover
// line numbers. Code left byte-identical. DefragmentationEnd destroys the
// context; the pass functions delegate to the context object.
17247 VkResult VmaAllocator_T::DefragmentationEnd(
17250 vma_delete(
this, context);
17254 VkResult VmaAllocator_T::DefragmentationPassBegin(
17258 return context->DefragmentPassBegin(pInfo);
17260 VkResult VmaAllocator_T::DefragmentationPassEnd(
17263 return context->DefragmentPassEnd();
// NOTE(review): extraction artifact — the function header (presumably
// VmaAllocator_T::GetAllocationInfo) and several lines were lost; numeric
// prefixes are leftover line numbers. Code left byte-identical. Visible
// logic: for allocations that can become lost, read frame indices in a
// compare-exchange loop, reporting zeroed fields if lost; otherwise fill
// pAllocationInfo directly from the allocation.
17269 if(hAllocation->CanBecomeLost())
17275 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17276 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17279 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Lost allocation: report size/user data but no usable memory/offset.
17283 pAllocationInfo->
offset = 0;
17284 pAllocationInfo->
size = hAllocation->GetSize();
17286 pAllocationInfo->
pUserData = hAllocation->GetUserData();
17289 else if(localLastUseFrameIndex == localCurrFrameIndex)
17291 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
17292 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
17293 pAllocationInfo->
offset = hAllocation->GetOffset();
17294 pAllocationInfo->
size = hAllocation->GetSize();
17296 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// CAS loop: try to stamp the current frame index as last-use.
17301 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17303 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path. The stats-only frame-index update below exists solely when
// stats strings are enabled.
17310 #if VMA_STATS_STRING_ENABLED
17311 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17312 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17315 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
17316 if(localLastUseFrameIndex == localCurrFrameIndex)
17322 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17324 localLastUseFrameIndex = localCurrFrameIndex;
17330 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
17331 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
17332 pAllocationInfo->
offset = hAllocation->GetOffset();
17333 pAllocationInfo->
size = hAllocation->GetSize();
17334 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
17335 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// NOTE(review): extraction artifact — braces and return statements are
// missing; numeric prefixes are leftover line numbers. Code left
// byte-identical. Visible logic: mirror of GetAllocationInfo's frame-index
// handling, but only updates last-use frame index and reports (via the lost
// bool return) whether the allocation is still valid.
17339 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
17342 if(hAllocation->CanBecomeLost())
17344 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17345 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17348 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
17352 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS loop: stamp the current frame as last-use.
17358 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17360 localLastUseFrameIndex = localCurrFrameIndex;
17367 #if VMA_STATS_STRING_ENABLED
17368 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17369 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17372 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
17373 if(localLastUseFrameIndex == localCurrFrameIndex)
17379 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17381 localLastUseFrameIndex = localCurrFrameIndex;
// NOTE(review): extraction artifact — the function header (presumably
// VmaAllocator_T::CreatePool) and validation lines were lost; numeric
// prefixes are leftover line numbers. Code left byte-identical. Visible
// logic: validate the create info against the global memory-type mask,
// construct a VmaPool_T with a computed preferred block size, create its
// minimum blocks, and register it in m_Pools with a fresh id.
17393 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
17409 return VK_ERROR_INITIALIZATION_FAILED;
// Memory type excluded by CalculateGlobalMemoryTypeBits (e.g. DEVICE_COHERENT_AMD).
17413 ((1u << pCreateInfo->
memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
17415 return VK_ERROR_FEATURE_NOT_PRESENT;
17422 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
17424 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
17426 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
17427 if(res != VK_SUCCESS)
17429 vma_delete(
this, *pPool);
17436 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
17437 (*pPool)->SetId(m_NextPoolId++);
17438 m_Pools.PushBack(*pPool);
17444 void VmaAllocator_T::DestroyPool(
VmaPool pool)
17448 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
17449 m_Pools.Remove(pool);
17452 vma_delete(
this, pool);
// NOTE(review): the enclosing function header (presumably
// VmaAllocator_T::GetPoolStats) was lost in extraction; this single visible
// statement delegates to the pool's block vector.
17457 pool->m_BlockVector.GetPoolStats(pPoolStats);
17460 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
17462 m_CurrentFrameIndex.store(frameIndex);
17464 #if VMA_MEMORY_BUDGET
17465 if(m_UseExtMemoryBudget)
17467 UpdateVulkanBudget();
// NOTE(review): extraction artifact — the hPool parameter line is missing;
// numeric prefixes are leftover line numbers. Code left byte-identical.
// Delegates to the pool's block vector with the current frame index.
17472 void VmaAllocator_T::MakePoolAllocationsLost(
17474 size_t* pLostAllocationCount)
17476 hPool->m_BlockVector.MakePoolAllocationsLost(
17477 m_CurrentFrameIndex.load(),
17478 pLostAllocationCount);
17481 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
17483 return hPool->m_BlockVector.CheckCorruption();
// NOTE(review): extraction artifact — switch cases other than
// VK_ERROR_FEATURE_NOT_PRESENT and braces are missing; numeric prefixes are
// leftover line numbers. Code left byte-identical. Visible logic: check
// corruption across default block vectors and custom pools whose memory type
// is in memoryTypeBits; result is VK_SUCCESS if at least one vector was
// actually checked, VK_ERROR_FEATURE_NOT_PRESENT if none supported checking.
17486 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
17488 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default per-memory-type block vectors.
17491 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17493 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
17495 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
17496 VMA_ASSERT(pBlockVector);
17497 VkResult localRes = pBlockVector->CheckCorruption();
17500 case VK_ERROR_FEATURE_NOT_PRESENT:
17503 finalRes = VK_SUCCESS;
// Custom pools, under the pools read lock.
17513 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
17514 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
17516 if(((1u << pool->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
17518 VkResult localRes = pool->m_BlockVector.CheckCorruption();
17521 case VK_ERROR_FEATURE_NOT_PRESENT:
17524 finalRes = VK_SUCCESS;
17536 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
17538 *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST,
false);
17539 (*pAllocation)->InitLost();
// NOTE(review): extraction artifact — the destructor body, Commit(), and
// braces are missing; numeric prefixes are leftover line numbers. Code left
// byte-identical. Visible intent: RAII helper that increments an atomic
// counter and (presumably) decrements it again in the destructor unless the
// increment is committed by nulling m_Atomic — confirm against upstream VMA.
17543 template<
typename T>
17544 struct AtomicTransactionalIncrement
17547 typedef std::atomic<T> AtomicT;
17548 ~AtomicTransactionalIncrement()
17553 T Increment(AtomicT* atomic)
17556 return m_Atomic->fetch_add(1);
// Setting m_Atomic to null commits the increment (destructor becomes a no-op).
17560 m_Atomic =
nullptr;
17564 AtomicT* m_Atomic =
nullptr;
// NOTE(review): extraction artifact — braces and a few lines (retry loop
// around the CAS, return statements) are missing; numeric prefixes are
// leftover line numbers. Code left byte-identical. Visible logic: guard the
// device-memory object count, enforce optional per-heap size limits by
// compare-exchanging m_BlockBytes, call vkAllocateMemory, invoke the user
// allocate callback on success, and roll back block bytes on failure.
17567 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
17569 AtomicTransactionalIncrement<uint32_t> deviceMemoryCountIncrement;
17570 const uint64_t prevDeviceMemoryCount = deviceMemoryCountIncrement.Increment(&m_DeviceMemoryCount);
17571 #if VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
17572 if(prevDeviceMemoryCount >= m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount)
17574 return VK_ERROR_TOO_MANY_OBJECTS;
17578 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Heap has an explicit size limit: reserve the bytes via CAS before allocating.
17581 if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
17583 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
17584 VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
17587 const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
17588 if(blockBytesAfterAllocation > heapSize)
17590 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
17592 if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
// No limit on this heap: plain atomic add.
17600 m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
17604 VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
17606 if(res == VK_SUCCESS)
17608 #if VMA_MEMORY_BUDGET
17609 ++m_Budget.m_OperationsSinceBudgetFetch;
// Informative callback for the user.
17613 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
17615 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.
pUserData);
17618 deviceMemoryCountIncrement.Commit();
// Failure: release the reserved block bytes.
17622 m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
17628 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
17631 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
17633 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.
pUserData);
17637 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
17639 m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
17641 --m_DeviceMemoryCount;
17644 VkResult VmaAllocator_T::BindVulkanBuffer(
17645 VkDeviceMemory memory,
17646 VkDeviceSize memoryOffset,
17650 if(pNext != VMA_NULL)
17652 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17653 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17654 m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
17656 VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
17657 bindBufferMemoryInfo.pNext = pNext;
17658 bindBufferMemoryInfo.buffer = buffer;
17659 bindBufferMemoryInfo.memory = memory;
17660 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17661 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
17666 return VK_ERROR_EXTENSION_NOT_PRESENT;
17671 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
17675 VkResult VmaAllocator_T::BindVulkanImage(
17676 VkDeviceMemory memory,
17677 VkDeviceSize memoryOffset,
17681 if(pNext != VMA_NULL)
17683 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17684 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17685 m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
17687 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
17688 bindBufferMemoryInfo.pNext = pNext;
17689 bindBufferMemoryInfo.image = image;
17690 bindBufferMemoryInfo.memory = memory;
17691 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17692 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
17697 return VK_ERROR_EXTENSION_NOT_PRESENT;
17702 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
// NOTE(review): extraction artifact — braces, return of res, and the default
// switch case are missing; numeric prefixes are leftover line numbers. Code
// left byte-identical. Visible logic: mapping a lost-capable allocation is
// rejected; block allocations map through their owning block (offset applied),
// dedicated allocations map directly.
17706 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
17708 if(hAllocation->CanBecomeLost())
17710 return VK_ERROR_MEMORY_MAP_FAILED;
17713 switch(hAllocation->GetType())
17715 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17717 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17718 char *pBytes = VMA_NULL;
17719 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
17720 if(res == VK_SUCCESS)
// Returned pointer is the block mapping plus this allocation's offset.
17722 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
17723 hAllocation->BlockAllocMap();
17727 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17728 return hAllocation->DedicatedAllocMap(
this, ppData);
17731 return VK_ERROR_MEMORY_MAP_FAILED;
// NOTE(review): the enclosing function header (presumably
// VmaAllocator_T::Unmap) was lost in extraction; numeric prefixes are
// leftover line numbers. Code left byte-identical. Visible logic: block
// allocations unmap via their owning block; dedicated allocations unmap
// directly.
17737 switch(hAllocation->GetType())
17739 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17741 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17742 hAllocation->BlockAllocUnmap();
17743 pBlock->Unmap(
this, 1);
17746 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17747 hAllocation->DedicatedAllocUnmap(
this);
// NOTE(review): extraction artifact — parameters (hAllocation, hBuffer,
// pNext), braces, and the default case are missing; numeric prefixes are
// leftover line numbers. Code left byte-identical. Dispatches the bind on
// allocation type: dedicated binds directly, block allocations bind through
// the owning block (which adds the block-relative offset).
17754 VkResult VmaAllocator_T::BindBufferMemory(
17756 VkDeviceSize allocationLocalOffset,
17760 VkResult res = VK_SUCCESS;
17761 switch(hAllocation->GetType())
17763 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17764 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
17766 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17768 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17769 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
17770 res = pBlock->BindBufferMemory(
this, hAllocation, allocationLocalOffset, hBuffer, pNext);
// NOTE(review): extraction artifact — parameters (hAllocation, hImage,
// pNext), braces, and the default case are missing; numeric prefixes are
// leftover line numbers. Code left byte-identical. Image counterpart of
// BindBufferMemory: dispatch the bind on allocation type.
17779 VkResult VmaAllocator_T::BindImageMemory(
17781 VkDeviceSize allocationLocalOffset,
17785 VkResult res = VK_SUCCESS;
17786 switch(hAllocation->GetType())
17788 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17789 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
17791 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17793 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
17794 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
17795 res = pBlock->BindImageMemory(
this, hAllocation, allocationLocalOffset, hImage, pNext);
// NOTE(review): extraction artifact — the hAllocation parameter, braces,
// switch keyword, and default case are missing; numeric prefixes are leftover
// line numbers. Code left byte-identical. Visible logic: compute the
// nonCoherentAtomSize-aligned mapped range (skipped for coherent memory) and
// issue vkFlushMappedMemoryRanges or vkInvalidateMappedMemoryRanges.
17804 VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
17806 VkDeviceSize offset, VkDeviceSize size,
17807 VMA_CACHE_OPERATION op)
17809 VkResult res = VK_SUCCESS;
17811 VkMappedMemoryRange memRange = {};
17812 if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
17816 case VMA_CACHE_FLUSH:
17817 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
17819 case VMA_CACHE_INVALIDATE:
17820 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// NOTE(review): extraction artifact — the allocations parameter, braces, and
// default case are missing; numeric prefixes are leftover line numbers. Code
// left byte-identical. Batched variant of FlushOrInvalidateAllocation:
// collects all non-empty ranges into a small vector and submits them in one
// Vulkan call. Missing offsets/sizes arrays default to 0 / VK_WHOLE_SIZE.
17830 VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
17831 uint32_t allocationCount,
17833 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
17834 VMA_CACHE_OPERATION op)
17836 typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
17837 typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
17838 RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
17840 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
17843 const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
17844 const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
17845 VkMappedMemoryRange newRange;
17846 if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
17848 ranges.push_back(newRange);
17852 VkResult res = VK_SUCCESS;
17853 if(!ranges.empty())
17857 case VMA_CACHE_FLUSH:
17858 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
17860 case VMA_CACHE_INVALIDATE:
17861 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
// NOTE(review): extraction artifact — braces and some lines (e.g. unmapping
// before free) are missing; numeric prefixes are leftover line numbers. Code
// left byte-identical. Visible logic: unregister the allocation from the
// per-type dedicated list under its mutex, then free the underlying
// VkDeviceMemory.
17871 void VmaAllocator_T::FreeDedicatedMemory(
const VmaAllocation allocation)
17873 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
17875 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17877 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17878 DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
17879 dedicatedAllocations.Remove(allocation);
17882 VkDeviceMemory hMemory = allocation->GetMemory();
17894 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
17896 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
17899 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const
17901 VkBufferCreateInfo dummyBufCreateInfo;
17902 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
17904 uint32_t memoryTypeBits = 0;
17907 VkBuffer buf = VK_NULL_HANDLE;
17908 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
17909 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
17910 if(res == VK_SUCCESS)
17913 VkMemoryRequirements memReq;
17914 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
17915 memoryTypeBits = memReq.memoryTypeBits;
17918 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
17921 return memoryTypeBits;
17924 uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits()
const
17927 VMA_ASSERT(GetMemoryTypeCount() > 0);
17929 uint32_t memoryTypeBits = UINT32_MAX;
17931 if(!m_UseAmdDeviceCoherentMemory)
17934 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17936 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
17938 memoryTypeBits &= ~(1u << memTypeIndex);
17943 return memoryTypeBits;
// NOTE(review): extraction artifact — the allocation parameter, braces,
// return true/false statements, and the default case are missing; numeric
// prefixes are leftover line numbers. Code left byte-identical. Visible
// logic: for non-coherent memory types, compute a VkMappedMemoryRange aligned
// to nonCoherentAtomSize; dedicated allocations clamp to the allocation size,
// block allocations additionally translate into block-relative coordinates
// and clamp to the block size.
17946 bool VmaAllocator_T::GetFlushOrInvalidateRange(
17948 VkDeviceSize offset, VkDeviceSize size,
17949 VkMappedMemoryRange& outRange)
const
17951 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
// Coherent memory or empty size needs no explicit flush/invalidate range.
17952 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
17954 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
17955 const VkDeviceSize allocationSize = allocation->GetSize();
17956 VMA_ASSERT(offset <= allocationSize);
17958 outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
17959 outRange.pNext = VMA_NULL;
17960 outRange.memory = allocation->GetMemory();
17962 switch(allocation->GetType())
17964 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17965 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
17966 if(size == VK_WHOLE_SIZE)
17968 outRange.size = allocationSize - outRange.offset;
17972 VMA_ASSERT(offset + size <= allocationSize);
17973 outRange.size = VMA_MIN(
17974 VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
17975 allocationSize - outRange.offset);
17978 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// First compute the range relative to the allocation...
17981 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
17982 if(size == VK_WHOLE_SIZE)
17984 size = allocationSize - offset;
17988 VMA_ASSERT(offset + size <= allocationSize);
17990 outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);
// ...then translate into block coordinates and clamp to the block size.
17993 const VkDeviceSize allocationOffset = allocation->GetOffset();
17994 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
17995 const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
17996 outRange.offset += allocationOffset;
17997 outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
18009 #if VMA_MEMORY_BUDGET
18011 void VmaAllocator_T::UpdateVulkanBudget()
18013 VMA_ASSERT(m_UseExtMemoryBudget);
18015 VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };
18017 VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
18018 VmaPnextChainPushFront(&memProps, &budgetProps);
18020 GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
18023 VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
18025 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
18027 m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
18028 m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
18029 m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
18032 if(m_Budget.m_VulkanBudget[heapIndex] == 0)
18034 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
18036 else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
18038 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
18040 if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
18042 m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
18045 m_Budget.m_OperationsSinceBudgetFetch = 0;
18051 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
18053 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
18054 !hAllocation->CanBecomeLost() &&
18055 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
18057 void* pData = VMA_NULL;
18058 VkResult res = Map(hAllocation, &pData);
18059 if(res == VK_SUCCESS)
18061 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
18062 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
18063 Unmap(hAllocation);
18067 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
18072 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
18074 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
18075 if(memoryTypeBits == UINT32_MAX)
18077 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
18078 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
18080 return memoryTypeBits;
18083 #if VMA_STATS_STRING_ENABLED
18085 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
18087 bool dedicatedAllocationsStarted =
false;
18088 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
18090 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
18091 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
18092 if(!dedicatedAllocList.IsEmpty())
18094 if(dedicatedAllocationsStarted ==
false)
18096 dedicatedAllocationsStarted =
true;
18097 json.WriteString(
"DedicatedAllocations");
18098 json.BeginObject();
18101 json.BeginString(
"Type ");
18102 json.ContinueString(memTypeIndex);
18108 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
18110 json.BeginObject(
true);
18111 alloc->PrintParameters(json);
18118 if(dedicatedAllocationsStarted)
18124 bool allocationsStarted =
false;
18125 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
18127 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
18129 if(allocationsStarted ==
false)
18131 allocationsStarted =
true;
18132 json.WriteString(
"DefaultPools");
18133 json.BeginObject();
18136 json.BeginString(
"Type ");
18137 json.ContinueString(memTypeIndex);
18140 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
18143 if(allocationsStarted)
18151 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
18152 if(!m_Pools.IsEmpty())
18154 json.WriteString(
"Pools");
18155 json.BeginObject();
18156 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
18158 json.BeginString();
18159 json.ContinueString(pool->GetId());
18162 pool->m_BlockVector.PrintDetailedMap(json);
18178 VMA_ASSERT(pCreateInfo && pAllocator);
18181 VMA_DEBUG_LOG(
"vmaCreateAllocator");
18183 return (*pAllocator)->Init(pCreateInfo);
18189 if(allocator != VK_NULL_HANDLE)
18191 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
18192 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
18193 vma_delete(&allocationCallbacks, allocator);
18199 VMA_ASSERT(allocator && pAllocatorInfo);
18200 pAllocatorInfo->
instance = allocator->m_hInstance;
18201 pAllocatorInfo->
physicalDevice = allocator->GetPhysicalDevice();
18202 pAllocatorInfo->
device = allocator->m_hDevice;
18207 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
18209 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
18210 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
18215 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
18217 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
18218 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
18223 uint32_t memoryTypeIndex,
18224 VkMemoryPropertyFlags* pFlags)
18226 VMA_ASSERT(allocator && pFlags);
18227 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
18228 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
18233 uint32_t frameIndex)
18235 VMA_ASSERT(allocator);
18236 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
18238 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18240 allocator->SetCurrentFrameIndex(frameIndex);
18247 VMA_ASSERT(allocator && pStats);
18248 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18249 allocator->CalculateStats(pStats);
18256 VMA_ASSERT(allocator && pBudget);
18257 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18258 allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
18261 #if VMA_STATS_STRING_ENABLED
18265 char** ppStatsString,
18266 VkBool32 detailedMap)
18268 VMA_ASSERT(allocator && ppStatsString);
18269 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18271 VmaStringBuilder sb(allocator);
18273 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
18274 json.BeginObject();
18277 allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());
18280 allocator->CalculateStats(&stats);
18282 json.WriteString(
"Total");
18283 VmaPrintStatInfo(json, stats.
total);
18285 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
18287 json.BeginString(
"Heap ");
18288 json.ContinueString(heapIndex);
18290 json.BeginObject();
18292 json.WriteString(
"Size");
18293 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
18295 json.WriteString(
"Flags");
18296 json.BeginArray(
true);
18297 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
18299 json.WriteString(
"DEVICE_LOCAL");
18303 json.WriteString(
"Budget");
18304 json.BeginObject();
18306 json.WriteString(
"BlockBytes");
18307 json.WriteNumber(budget[heapIndex].blockBytes);
18308 json.WriteString(
"AllocationBytes");
18309 json.WriteNumber(budget[heapIndex].allocationBytes);
18310 json.WriteString(
"Usage");
18311 json.WriteNumber(budget[heapIndex].usage);
18312 json.WriteString(
"Budget");
18313 json.WriteNumber(budget[heapIndex].budget);
18319 json.WriteString(
"Stats");
18320 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
18323 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
18325 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
18327 json.BeginString(
"Type ");
18328 json.ContinueString(typeIndex);
18331 json.BeginObject();
18333 json.WriteString(
"Flags");
18334 json.BeginArray(
true);
18335 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
18336 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
18338 json.WriteString(
"DEVICE_LOCAL");
18340 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
18342 json.WriteString(
"HOST_VISIBLE");
18344 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
18346 json.WriteString(
"HOST_COHERENT");
18348 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
18350 json.WriteString(
"HOST_CACHED");
18352 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
18354 json.WriteString(
"LAZILY_ALLOCATED");
18356 #if VMA_VULKAN_VERSION >= 1001000
18357 if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
18359 json.WriteString(
"PROTECTED");
18362 #if VK_AMD_device_coherent_memory
18363 if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
18365 json.WriteString(
"DEVICE_COHERENT");
18367 if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
18369 json.WriteString(
"DEVICE_UNCACHED");
18376 json.WriteString(
"Stats");
18377 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
18386 if(detailedMap == VK_TRUE)
18388 allocator->PrintDetailedMap(json);
18394 const size_t len = sb.GetLength();
18395 char*
const pChars = vma_new_array(allocator,
char, len + 1);
18398 memcpy(pChars, sb.GetData(), len);
18400 pChars[len] =
'\0';
18401 *ppStatsString = pChars;
18406 char* pStatsString)
18408 if(pStatsString != VMA_NULL)
18410 VMA_ASSERT(allocator);
18411 size_t len = strlen(pStatsString);
18412 vma_delete_array(allocator, pStatsString, len + 1);
18423 uint32_t memoryTypeBits,
18425 uint32_t* pMemoryTypeIndex)
18427 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18428 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18429 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18431 memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();
18438 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
18439 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
18440 uint32_t notPreferredFlags = 0;
18443 switch(pAllocationCreateInfo->
usage)
18448 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18450 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18454 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
18457 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18458 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18460 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18464 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18465 preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
18468 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18471 requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
18480 (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
18482 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
18485 *pMemoryTypeIndex = UINT32_MAX;
18486 uint32_t minCost = UINT32_MAX;
18487 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
18488 memTypeIndex < allocator->GetMemoryTypeCount();
18489 ++memTypeIndex, memTypeBit <<= 1)
18492 if((memTypeBit & memoryTypeBits) != 0)
18494 const VkMemoryPropertyFlags currFlags =
18495 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
18497 if((requiredFlags & ~currFlags) == 0)
18500 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
18501 VmaCountBitsSet(currFlags & notPreferredFlags);
18503 if(currCost < minCost)
18505 *pMemoryTypeIndex = memTypeIndex;
18510 minCost = currCost;
18515 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
18520 const VkBufferCreateInfo* pBufferCreateInfo,
18522 uint32_t* pMemoryTypeIndex)
18524 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18525 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
18526 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18527 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18529 const VkDevice hDev = allocator->m_hDevice;
18530 VkBuffer hBuffer = VK_NULL_HANDLE;
18531 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
18532 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
18533 if(res == VK_SUCCESS)
18535 VkMemoryRequirements memReq = {};
18536 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
18537 hDev, hBuffer, &memReq);
18541 memReq.memoryTypeBits,
18542 pAllocationCreateInfo,
18545 allocator->GetVulkanFunctions().vkDestroyBuffer(
18546 hDev, hBuffer, allocator->GetAllocationCallbacks());
18553 const VkImageCreateInfo* pImageCreateInfo,
18555 uint32_t* pMemoryTypeIndex)
18557 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18558 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
18559 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18560 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18562 const VkDevice hDev = allocator->m_hDevice;
18563 VkImage hImage = VK_NULL_HANDLE;
18564 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
18565 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
18566 if(res == VK_SUCCESS)
18568 VkMemoryRequirements memReq = {};
18569 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
18570 hDev, hImage, &memReq);
18574 memReq.memoryTypeBits,
18575 pAllocationCreateInfo,
18578 allocator->GetVulkanFunctions().vkDestroyImage(
18579 hDev, hImage, allocator->GetAllocationCallbacks());
18589 VMA_ASSERT(allocator && pCreateInfo && pPool);
18591 VMA_DEBUG_LOG(
"vmaCreatePool");
18593 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18595 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
18597 #if VMA_RECORDING_ENABLED
18598 if(allocator->GetRecorder() != VMA_NULL)
18600 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
18611 VMA_ASSERT(allocator);
18613 if(pool == VK_NULL_HANDLE)
18618 VMA_DEBUG_LOG(
"vmaDestroyPool");
18620 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18622 #if VMA_RECORDING_ENABLED
18623 if(allocator->GetRecorder() != VMA_NULL)
18625 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
18629 allocator->DestroyPool(pool);
18637 VMA_ASSERT(allocator && pool && pPoolStats);
18639 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18641 allocator->GetPoolStats(pool, pPoolStats);
18647 size_t* pLostAllocationCount)
18649 VMA_ASSERT(allocator && pool);
18651 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18653 #if VMA_RECORDING_ENABLED
18654 if(allocator->GetRecorder() != VMA_NULL)
18656 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
18660 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
18665 VMA_ASSERT(allocator && pool);
18667 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18669 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
18671 return allocator->CheckPoolCorruption(pool);
18677 const char** ppName)
18679 VMA_ASSERT(allocator && pool && ppName);
18681 VMA_DEBUG_LOG(
"vmaGetPoolName");
18683 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18685 *ppName = pool->GetName();
18693 VMA_ASSERT(allocator && pool);
18695 VMA_DEBUG_LOG(
"vmaSetPoolName");
18697 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18699 pool->SetName(pName);
18701 #if VMA_RECORDING_ENABLED
18702 if(allocator->GetRecorder() != VMA_NULL)
18704 allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
18711 const VkMemoryRequirements* pVkMemoryRequirements,
18716 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
18718 VMA_DEBUG_LOG(
"vmaAllocateMemory");
18720 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18722 VkResult result = allocator->AllocateMemory(
18723 *pVkMemoryRequirements,
18730 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18734 #if VMA_RECORDING_ENABLED
18735 if(allocator->GetRecorder() != VMA_NULL)
18737 allocator->GetRecorder()->RecordAllocateMemory(
18738 allocator->GetCurrentFrameIndex(),
18739 *pVkMemoryRequirements,
18745 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18747 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18755 const VkMemoryRequirements* pVkMemoryRequirements,
18757 size_t allocationCount,
18761 if(allocationCount == 0)
18766 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
18768 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
18770 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18772 VkResult result = allocator->AllocateMemory(
18773 *pVkMemoryRequirements,
18780 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18784 #if VMA_RECORDING_ENABLED
18785 if(allocator->GetRecorder() != VMA_NULL)
18787 allocator->GetRecorder()->RecordAllocateMemoryPages(
18788 allocator->GetCurrentFrameIndex(),
18789 *pVkMemoryRequirements,
18791 (uint64_t)allocationCount,
18796 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18798 for(
size_t i = 0; i < allocationCount; ++i)
18800 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
18814 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18816 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
18818 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18820 VkMemoryRequirements vkMemReq = {};
18821 bool requiresDedicatedAllocation =
false;
18822 bool prefersDedicatedAllocation =
false;
18823 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
18824 requiresDedicatedAllocation,
18825 prefersDedicatedAllocation);
18827 VkResult result = allocator->AllocateMemory(
18829 requiresDedicatedAllocation,
18830 prefersDedicatedAllocation,
18835 VMA_SUBALLOCATION_TYPE_BUFFER,
18839 #if VMA_RECORDING_ENABLED
18840 if(allocator->GetRecorder() != VMA_NULL)
18842 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
18843 allocator->GetCurrentFrameIndex(),
18845 requiresDedicatedAllocation,
18846 prefersDedicatedAllocation,
18852 if(pAllocationInfo && result == VK_SUCCESS)
18854 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18867 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18869 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
18871 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18873 VkMemoryRequirements vkMemReq = {};
18874 bool requiresDedicatedAllocation =
false;
18875 bool prefersDedicatedAllocation =
false;
18876 allocator->GetImageMemoryRequirements(image, vkMemReq,
18877 requiresDedicatedAllocation, prefersDedicatedAllocation);
18879 VkResult result = allocator->AllocateMemory(
18881 requiresDedicatedAllocation,
18882 prefersDedicatedAllocation,
18887 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
18891 #if VMA_RECORDING_ENABLED
18892 if(allocator->GetRecorder() != VMA_NULL)
18894 allocator->GetRecorder()->RecordAllocateMemoryForImage(
18895 allocator->GetCurrentFrameIndex(),
18897 requiresDedicatedAllocation,
18898 prefersDedicatedAllocation,
18904 if(pAllocationInfo && result == VK_SUCCESS)
18906 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18916 VMA_ASSERT(allocator);
18918 if(allocation == VK_NULL_HANDLE)
18923 VMA_DEBUG_LOG(
"vmaFreeMemory");
18925 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18927 #if VMA_RECORDING_ENABLED
18928 if(allocator->GetRecorder() != VMA_NULL)
18930 allocator->GetRecorder()->RecordFreeMemory(
18931 allocator->GetCurrentFrameIndex(),
18936 allocator->FreeMemory(
18943 size_t allocationCount,
18946 if(allocationCount == 0)
18951 VMA_ASSERT(allocator);
18953 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
18955 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18957 #if VMA_RECORDING_ENABLED
18958 if(allocator->GetRecorder() != VMA_NULL)
18960 allocator->GetRecorder()->RecordFreeMemoryPages(
18961 allocator->GetCurrentFrameIndex(),
18962 (uint64_t)allocationCount,
18967 allocator->FreeMemory(allocationCount, pAllocations);
18975 VMA_ASSERT(allocator && allocation && pAllocationInfo);
18977 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18979 #if VMA_RECORDING_ENABLED
18980 if(allocator->GetRecorder() != VMA_NULL)
18982 allocator->GetRecorder()->RecordGetAllocationInfo(
18983 allocator->GetCurrentFrameIndex(),
18988 allocator->GetAllocationInfo(allocation, pAllocationInfo);
18995 VMA_ASSERT(allocator && allocation);
18997 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18999 #if VMA_RECORDING_ENABLED
19000 if(allocator->GetRecorder() != VMA_NULL)
19002 allocator->GetRecorder()->RecordTouchAllocation(
19003 allocator->GetCurrentFrameIndex(),
19008 return allocator->TouchAllocation(allocation);
19016 VMA_ASSERT(allocator && allocation);
19018 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19020 allocation->SetUserData(allocator, pUserData);
19022 #if VMA_RECORDING_ENABLED
19023 if(allocator->GetRecorder() != VMA_NULL)
19025 allocator->GetRecorder()->RecordSetAllocationUserData(
19026 allocator->GetCurrentFrameIndex(),
19037 VMA_ASSERT(allocator && pAllocation);
19039 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
19041 allocator->CreateLostAllocation(pAllocation);
19043 #if VMA_RECORDING_ENABLED
19044 if(allocator->GetRecorder() != VMA_NULL)
19046 allocator->GetRecorder()->RecordCreateLostAllocation(
19047 allocator->GetCurrentFrameIndex(),
19058 VMA_ASSERT(allocator && allocation && ppData);
19060 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19062 VkResult res = allocator->Map(allocation, ppData);
19064 #if VMA_RECORDING_ENABLED
19065 if(allocator->GetRecorder() != VMA_NULL)
19067 allocator->GetRecorder()->RecordMapMemory(
19068 allocator->GetCurrentFrameIndex(),
19080 VMA_ASSERT(allocator && allocation);
19082 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19084 #if VMA_RECORDING_ENABLED
19085 if(allocator->GetRecorder() != VMA_NULL)
19087 allocator->GetRecorder()->RecordUnmapMemory(
19088 allocator->GetCurrentFrameIndex(),
19093 allocator->Unmap(allocation);
19098 VMA_ASSERT(allocator && allocation);
19100 VMA_DEBUG_LOG(
"vmaFlushAllocation");
19102 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19104 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
19106 #if VMA_RECORDING_ENABLED
19107 if(allocator->GetRecorder() != VMA_NULL)
19109 allocator->GetRecorder()->RecordFlushAllocation(
19110 allocator->GetCurrentFrameIndex(),
19111 allocation, offset, size);
19120 VMA_ASSERT(allocator && allocation);
19122 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
19124 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19126 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
19128 #if VMA_RECORDING_ENABLED
19129 if(allocator->GetRecorder() != VMA_NULL)
19131 allocator->GetRecorder()->RecordInvalidateAllocation(
19132 allocator->GetCurrentFrameIndex(),
19133 allocation, offset, size);
19142 uint32_t allocationCount,
19144 const VkDeviceSize* offsets,
19145 const VkDeviceSize* sizes)
19147 VMA_ASSERT(allocator);
19149 if(allocationCount == 0)
19154 VMA_ASSERT(allocations);
19156 VMA_DEBUG_LOG(
"vmaFlushAllocations");
19158 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19160 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);
19162 #if VMA_RECORDING_ENABLED
19163 if(allocator->GetRecorder() != VMA_NULL)
19174 uint32_t allocationCount,
19176 const VkDeviceSize* offsets,
19177 const VkDeviceSize* sizes)
19179 VMA_ASSERT(allocator);
19181 if(allocationCount == 0)
19186 VMA_ASSERT(allocations);
19188 VMA_DEBUG_LOG(
"vmaInvalidateAllocations");
19190 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19192 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);
19194 #if VMA_RECORDING_ENABLED
19195 if(allocator->GetRecorder() != VMA_NULL)
19206 VMA_ASSERT(allocator);
19208 VMA_DEBUG_LOG(
"vmaCheckCorruption");
19210 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19212 return allocator->CheckCorruption(memoryTypeBits);
19218 size_t allocationCount,
19219 VkBool32* pAllocationsChanged,
19229 if(pDefragmentationInfo != VMA_NULL)
19243 if(res == VK_NOT_READY)
19256 VMA_ASSERT(allocator && pInfo && pContext);
19267 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
19269 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
19271 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19273 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
19275 #if VMA_RECORDING_ENABLED
19276 if(allocator->GetRecorder() != VMA_NULL)
19278 allocator->GetRecorder()->RecordDefragmentationBegin(
19279 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
19290 VMA_ASSERT(allocator);
19292 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
19294 if(context != VK_NULL_HANDLE)
19296 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19298 #if VMA_RECORDING_ENABLED
19299 if(allocator->GetRecorder() != VMA_NULL)
19301 allocator->GetRecorder()->RecordDefragmentationEnd(
19302 allocator->GetCurrentFrameIndex(), context);
19306 return allocator->DefragmentationEnd(context);
19320 VMA_ASSERT(allocator);
19323 VMA_DEBUG_LOG(
"vmaBeginDefragmentationPass");
19325 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19327 if(context == VK_NULL_HANDLE)
19333 return allocator->DefragmentationPassBegin(pInfo, context);
19339 VMA_ASSERT(allocator);
19341 VMA_DEBUG_LOG(
"vmaEndDefragmentationPass");
19342 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19344 if(context == VK_NULL_HANDLE)
19347 return allocator->DefragmentationPassEnd(context);
19355 VMA_ASSERT(allocator && allocation && buffer);
19357 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
19359 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19361 return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
19367 VkDeviceSize allocationLocalOffset,
19371 VMA_ASSERT(allocator && allocation && buffer);
19373 VMA_DEBUG_LOG(
"vmaBindBufferMemory2");
19375 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19377 return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
19385 VMA_ASSERT(allocator && allocation && image);
19387 VMA_DEBUG_LOG(
"vmaBindImageMemory");
19389 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19391 return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
19397 VkDeviceSize allocationLocalOffset,
19401 VMA_ASSERT(allocator && allocation && image);
19403 VMA_DEBUG_LOG(
"vmaBindImageMemory2");
19405 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19407 return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
19412 const VkBufferCreateInfo* pBufferCreateInfo,
19418 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
19420 if(pBufferCreateInfo->size == 0)
19422 return VK_ERROR_VALIDATION_FAILED_EXT;
19424 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
19425 !allocator->m_UseKhrBufferDeviceAddress)
19427 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
19428 return VK_ERROR_VALIDATION_FAILED_EXT;
19431 VMA_DEBUG_LOG(
"vmaCreateBuffer");
19433 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19435 *pBuffer = VK_NULL_HANDLE;
19436 *pAllocation = VK_NULL_HANDLE;
19439 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
19440 allocator->m_hDevice,
19442 allocator->GetAllocationCallbacks(),
19447 VkMemoryRequirements vkMemReq = {};
19448 bool requiresDedicatedAllocation =
false;
19449 bool prefersDedicatedAllocation =
false;
19450 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
19451 requiresDedicatedAllocation, prefersDedicatedAllocation);
19454 res = allocator->AllocateMemory(
19456 requiresDedicatedAllocation,
19457 prefersDedicatedAllocation,
19459 pBufferCreateInfo->usage,
19461 *pAllocationCreateInfo,
19462 VMA_SUBALLOCATION_TYPE_BUFFER,
19466 #if VMA_RECORDING_ENABLED
19467 if(allocator->GetRecorder() != VMA_NULL)
19469 allocator->GetRecorder()->RecordCreateBuffer(
19470 allocator->GetCurrentFrameIndex(),
19471 *pBufferCreateInfo,
19472 *pAllocationCreateInfo,
19482 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
19487 #if VMA_STATS_STRING_ENABLED
19488 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
19490 if(pAllocationInfo != VMA_NULL)
19492 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19497 allocator->FreeMemory(
19500 *pAllocation = VK_NULL_HANDLE;
19501 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19502 *pBuffer = VK_NULL_HANDLE;
19505 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19506 *pBuffer = VK_NULL_HANDLE;
19514 const VkBufferCreateInfo* pBufferCreateInfo,
19516 VkDeviceSize minAlignment,
19521 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && VmaIsPow2(minAlignment) && pBuffer && pAllocation);
19523 if(pBufferCreateInfo->size == 0)
19525 return VK_ERROR_VALIDATION_FAILED_EXT;
19527 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
19528 !allocator->m_UseKhrBufferDeviceAddress)
19530 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
19531 return VK_ERROR_VALIDATION_FAILED_EXT;
19534 VMA_DEBUG_LOG(
"vmaCreateBufferWithAlignment");
19536 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19538 *pBuffer = VK_NULL_HANDLE;
19539 *pAllocation = VK_NULL_HANDLE;
19542 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
19543 allocator->m_hDevice,
19545 allocator->GetAllocationCallbacks(),
19550 VkMemoryRequirements vkMemReq = {};
19551 bool requiresDedicatedAllocation =
false;
19552 bool prefersDedicatedAllocation =
false;
19553 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
19554 requiresDedicatedAllocation, prefersDedicatedAllocation);
19557 vkMemReq.alignment = VMA_MAX(vkMemReq.alignment, minAlignment);
19560 res = allocator->AllocateMemory(
19562 requiresDedicatedAllocation,
19563 prefersDedicatedAllocation,
19565 pBufferCreateInfo->usage,
19567 *pAllocationCreateInfo,
19568 VMA_SUBALLOCATION_TYPE_BUFFER,
19572 #if VMA_RECORDING_ENABLED
19573 if(allocator->GetRecorder() != VMA_NULL)
19575 VMA_ASSERT(0 &&
"Not implemented.");
19584 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
19589 #if VMA_STATS_STRING_ENABLED
19590 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
19592 if(pAllocationInfo != VMA_NULL)
19594 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19599 allocator->FreeMemory(
19602 *pAllocation = VK_NULL_HANDLE;
19603 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19604 *pBuffer = VK_NULL_HANDLE;
19607 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19608 *pBuffer = VK_NULL_HANDLE;
19619 VMA_ASSERT(allocator);
19621 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19626 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
19628 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19630 #if VMA_RECORDING_ENABLED
19631 if(allocator->GetRecorder() != VMA_NULL)
19633 allocator->GetRecorder()->RecordDestroyBuffer(
19634 allocator->GetCurrentFrameIndex(),
19639 if(buffer != VK_NULL_HANDLE)
19641 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
19644 if(allocation != VK_NULL_HANDLE)
19646 allocator->FreeMemory(
19654 const VkImageCreateInfo* pImageCreateInfo,
19660 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
19662 if(pImageCreateInfo->extent.width == 0 ||
19663 pImageCreateInfo->extent.height == 0 ||
19664 pImageCreateInfo->extent.depth == 0 ||
19665 pImageCreateInfo->mipLevels == 0 ||
19666 pImageCreateInfo->arrayLayers == 0)
19668 return VK_ERROR_VALIDATION_FAILED_EXT;
19671 VMA_DEBUG_LOG(
"vmaCreateImage");
19673 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19675 *pImage = VK_NULL_HANDLE;
19676 *pAllocation = VK_NULL_HANDLE;
19679 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
19680 allocator->m_hDevice,
19682 allocator->GetAllocationCallbacks(),
19686 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
19687 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
19688 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
19691 VkMemoryRequirements vkMemReq = {};
19692 bool requiresDedicatedAllocation =
false;
19693 bool prefersDedicatedAllocation =
false;
19694 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
19695 requiresDedicatedAllocation, prefersDedicatedAllocation);
19697 res = allocator->AllocateMemory(
19699 requiresDedicatedAllocation,
19700 prefersDedicatedAllocation,
19704 *pAllocationCreateInfo,
19709 #if VMA_RECORDING_ENABLED
19710 if(allocator->GetRecorder() != VMA_NULL)
19712 allocator->GetRecorder()->RecordCreateImage(
19713 allocator->GetCurrentFrameIndex(),
19715 *pAllocationCreateInfo,
19725 res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
19730 #if VMA_STATS_STRING_ENABLED
19731 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
19733 if(pAllocationInfo != VMA_NULL)
19735 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19740 allocator->FreeMemory(
19743 *pAllocation = VK_NULL_HANDLE;
19744 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19745 *pImage = VK_NULL_HANDLE;
19748 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19749 *pImage = VK_NULL_HANDLE;
19760 VMA_ASSERT(allocator);
19762 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19767 VMA_DEBUG_LOG(
"vmaDestroyImage");
19769 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19771 #if VMA_RECORDING_ENABLED
19772 if(allocator->GetRecorder() != VMA_NULL)
19774 allocator->GetRecorder()->RecordDestroyImage(
19775 allocator->GetCurrentFrameIndex(),
19780 if(image != VK_NULL_HANDLE)
19782 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
19784 if(allocation != VK_NULL_HANDLE)
19786 allocator->FreeMemory(
Definition: vk_mem_alloc.h:2900
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2926
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2932
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2918
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2939
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2913
float priority
A floating-point value between 0 and 1, indicating the priority of the allocation relative to other m...
Definition: vk_mem_alloc.h:2946
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2908
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2902
Represents single memory allocation.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:3267
VkDeviceSize offset
Offset in VkDeviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:3291
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:3311
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:3272
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:3302
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:3316
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:3281
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:2422
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:2427
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2453
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:2478
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:2424
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null.
Definition: vk_mem_alloc.h:2484
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:2436
VkInstance instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:2496
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:2433
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:2491
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:2430
uint32_t vulkanApiVersion
Optional. The highest version of Vulkan that the application is designed to use.
Definition: vk_mem_alloc.h:2505
const VkExternalMemoryHandleTypeFlagsKHR * pTypeExternalMemoryHandleTypes
Either null or a pointer to an array of external memory handle types for each Vulkan memory type.
Definition: vk_mem_alloc.h:2516
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:2439
Represents the main object of this library, once initialized.
Information about existing VmaAllocator object.
Definition: vk_mem_alloc.h:2532
VkDevice device
Handle to Vulkan device object.
Definition: vk_mem_alloc.h:2547
VkInstance instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:2537
VkPhysicalDevice physicalDevice
Handle to Vulkan physical device object.
Definition: vk_mem_alloc.h:2542
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
Definition: vk_mem_alloc.h:2638
VkDeviceSize blockBytes
Sum size of all VkDeviceMemory blocks allocated from particular heap, in bytes.
Definition: vk_mem_alloc.h:2641
VkDeviceSize allocationBytes
Sum size of all allocations created in particular heap, in bytes.
Definition: vk_mem_alloc.h:2652
VkDeviceSize usage
Estimated current memory usage of the program, in bytes.
Definition: vk_mem_alloc.h:2662
VkDeviceSize budget
Estimated amount of memory available to the program, in bytes.
Definition: vk_mem_alloc.h:2673
Opaque object that represents a started defragmentation process.
Parameters for defragmentation.
Definition: vk_mem_alloc.h:3666
const VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:3706
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:3672
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:3726
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:3721
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:3669
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:3687
uint32_t poolCount
Number of pools in pPools array.
Definition: vk_mem_alloc.h:3690
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:3735
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:3716
const VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:3681
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:3711
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:3757
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:3767
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:3762
Parameters for incremental defragmentation steps.
Definition: vk_mem_alloc.h:3748
uint32_t moveCount
Definition: vk_mem_alloc.h:3749
VmaDefragmentationPassMoveInfo * pMoves
Definition: vk_mem_alloc.h:3750
Definition: vk_mem_alloc.h:3738
VkDeviceMemory memory
Definition: vk_mem_alloc.h:3740
VkDeviceSize offset
Definition: vk_mem_alloc.h:3741
VmaAllocation allocation
Definition: vk_mem_alloc.h:3739
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:3771
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:3779
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:3773
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:3775
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:3777
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:2231
void * pUserData
Optional, can be null.
Definition: vk_mem_alloc.h:2237
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:2233
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:2235
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:3068
float priority
A floating-point value between 0 and 1, indicating the priority of the allocations in this pool relat...
Definition: vk_mem_alloc.h:3116
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:3071
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:3074
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:3110
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:3083
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:3088
VkDeviceSize minAllocationAlignment
Additional minimum alignment to be used for all allocations created from this pool....
Definition: vk_mem_alloc.h:3123
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:3096
void * pMemoryAllocateNext
Additional pNext chain to be attached to VkMemoryAllocateInfo used for every allocation made by this ...
Definition: vk_mem_alloc.h:3133
Represents custom memory pool.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:3138
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:3141
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:3160
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:3157
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:3147
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:3144
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:3150
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:2407
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:2417
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:2409
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:2599
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:2610
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:2610
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:2609
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:2611
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:2603
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:2611
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:2607
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:2601
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:2610
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:2605
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:2611
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2616
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:2618
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:2617
VmaStatInfo total
Definition: vk_mem_alloc.h:2619
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:2361
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:2371
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:2376
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:2364
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:2368
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:2373
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:2365
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:2372
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:2369
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:2363
PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:2362
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:2375
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:2377
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:2370
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:2366
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:2367
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:2378
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:2374
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size, void *pUserData)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:2217
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
struct VmaAllocatorInfo VmaAllocatorInfo
Information about existing VmaAllocator object.
VkResult vmaEndDefragmentationPass(VmaAllocator allocator, VmaDefragmentationContext context)
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:2029
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
struct VmaStats VmaStats
General statistics from current state of Allocator.
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:3064
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:2393
@ VMA_RECORD_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2401
@ VMA_RECORD_FLUSH_AFTER_CALL_BIT
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:2399
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:2241
@ VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT
Definition: vk_mem_alloc.h:2316
@ VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:2246
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT
Definition: vk_mem_alloc.h:2298
@ VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT
Definition: vk_mem_alloc.h:2334
@ VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT
Definition: vk_mem_alloc.h:2286
@ VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:2271
@ VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2353
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT
Definition: vk_mem_alloc.h:2351
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2897
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
void vmaFreeMemory(VmaAllocator allocator, const VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:3656
@ VMA_DEFRAGMENTATION_FLAG_INCREMENTAL
Definition: vk_mem_alloc.h:3657
@ VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:3658
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
struct VmaDefragmentationPassInfo VmaDefragmentationPassInfo
Parameters for incremental defragmentation steps.
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size, void *pUserData)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:2210
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, const VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:3660
VkResult vmaBindBufferMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkBuffer buffer, const void *pNext)
Binds buffer to allocation with additional parameters.
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:3008
@ VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:3043
@ VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:3062
@ VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:3054
@ VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:3026
@ VMA_POOL_CREATE_ALGORITHM_MASK
Definition: vk_mem_alloc.h:3058
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkResult vmaDefragment(VmaAllocator allocator, const VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
VkResult vmaCreateBufferWithAlignment(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkDeviceSize minAlignment, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Creates a buffer with additional minimum alignment.
struct VmaBudget VmaBudget
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
VmaMemoryUsage
Definition: vk_mem_alloc.h:2721
@ VMA_MEMORY_USAGE_MAX_ENUM
Definition: vk_mem_alloc.h:2784
@ VMA_MEMORY_USAGE_CPU_ONLY
Definition: vk_mem_alloc.h:2752
@ VMA_MEMORY_USAGE_CPU_COPY
Definition: vk_mem_alloc.h:2774
@ VMA_MEMORY_USAGE_GPU_TO_CPU
Definition: vk_mem_alloc.h:2768
@ VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED
Definition: vk_mem_alloc.h:2782
@ VMA_MEMORY_USAGE_CPU_TO_GPU
Definition: vk_mem_alloc.h:2759
@ VMA_MEMORY_USAGE_GPU_ONLY
Definition: vk_mem_alloc.h:2742
@ VMA_MEMORY_USAGE_UNKNOWN
Definition: vk_mem_alloc.h:2725
VkResult vmaBindImageMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkImage image, const void *pNext)
Binds image to allocation with additional parameters.
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VkResult vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
VkResult vmaInvalidateAllocations(VmaAllocator allocator, uint32_t allocationCount, const VmaAllocation *allocations, const VkDeviceSize *offsets, const VkDeviceSize *sizes)
Invalidates memory of given set of allocations.
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
VkResult vmaBeginDefragmentationPass(VmaAllocator allocator, VmaDefragmentationContext context, VmaDefragmentationPassInfo *pInfo)
VkResult vmaFlushAllocations(VmaAllocator allocator, uint32_t allocationCount, const VmaAllocation *allocations, const VkDeviceSize *offsets, const VkDeviceSize *sizes)
Flushes memory of given set of allocations.
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:2355
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
struct VmaDefragmentationPassMoveInfo VmaDefragmentationPassMoveInfo
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2788
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT
Definition: vk_mem_alloc.h:2883
@ VMA_ALLOCATION_CREATE_MAPPED_BIT
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2819
@ VMA_ALLOCATION_CREATE_DONT_BIND_BIT
Definition: vk_mem_alloc.h:2856
@ VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT
Definition: vk_mem_alloc.h:2876
@ VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2795
@ VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT
Definition: vk_mem_alloc.h:2850
@ VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT
Definition: vk_mem_alloc.h:2832
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT
Definition: vk_mem_alloc.h:2886
@ VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT
Definition: vk_mem_alloc.h:2839
@ VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT
Definition: vk_mem_alloc.h:2865
@ VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2806
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT
Definition: vk_mem_alloc.h:2880
@ VMA_ALLOCATION_CREATE_STRATEGY_MASK
Definition: vk_mem_alloc.h:2890
@ VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT
Definition: vk_mem_alloc.h:2845
@ VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT
Definition: vk_mem_alloc.h:2860
@ VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT
Definition: vk_mem_alloc.h:2869
@ VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2895
void vmaSetPoolName(VmaAllocator allocator, VmaPool pool, const char *pName)
Sets name of a custom pool.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
void vmaGetBudget(VmaAllocator allocator, VmaBudget *pBudget)
Retrieves information about current memory budget for all memory heaps.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
void vmaGetPoolName(VmaAllocator allocator, VmaPool pool, const char **ppName)
Retrieves name of a custom pool.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:2403
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
void vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo *pAllocatorInfo)
Returns information about existing VmaAllocator object - handle to Vulkan device etc.