23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
2025 #ifndef VMA_RECORDING_ENABLED
2026 #define VMA_RECORDING_ENABLED 0
2029 #if !defined(NOMINMAX) && defined(VMA_IMPLEMENTATION)
2033 #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
2034 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
2035 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
2036 extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
2037 extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
2038 extern PFN_vkAllocateMemory vkAllocateMemory;
2039 extern PFN_vkFreeMemory vkFreeMemory;
2040 extern PFN_vkMapMemory vkMapMemory;
2041 extern PFN_vkUnmapMemory vkUnmapMemory;
2042 extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
2043 extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
2044 extern PFN_vkBindBufferMemory vkBindBufferMemory;
2045 extern PFN_vkBindImageMemory vkBindImageMemory;
2046 extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
2047 extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
2048 extern PFN_vkCreateBuffer vkCreateBuffer;
2049 extern PFN_vkDestroyBuffer vkDestroyBuffer;
2050 extern PFN_vkCreateImage vkCreateImage;
2051 extern PFN_vkDestroyImage vkDestroyImage;
2052 extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
2053 #if VMA_VULKAN_VERSION >= 1001000
2054 extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
2055 extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
2056 extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
2057 extern PFN_vkBindImageMemory2 vkBindImageMemory2;
2058 extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
2063 #include <vulkan/vulkan.h>
2069 #if !defined(VMA_VULKAN_VERSION)
2070 #if defined(VK_VERSION_1_2)
2071 #define VMA_VULKAN_VERSION 1002000
2072 #elif defined(VK_VERSION_1_1)
2073 #define VMA_VULKAN_VERSION 1001000
2075 #define VMA_VULKAN_VERSION 1000000
2079 #if !defined(VMA_DEDICATED_ALLOCATION)
2080 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
2081 #define VMA_DEDICATED_ALLOCATION 1
2083 #define VMA_DEDICATED_ALLOCATION 0
2087 #if !defined(VMA_BIND_MEMORY2)
2088 #if VK_KHR_bind_memory2
2089 #define VMA_BIND_MEMORY2 1
2091 #define VMA_BIND_MEMORY2 0
2095 #if !defined(VMA_MEMORY_BUDGET)
2096 #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
2097 #define VMA_MEMORY_BUDGET 1
2099 #define VMA_MEMORY_BUDGET 0
2104 #if !defined(VMA_BUFFER_DEVICE_ADDRESS)
2105 #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
2106 #define VMA_BUFFER_DEVICE_ADDRESS 1
2108 #define VMA_BUFFER_DEVICE_ADDRESS 0
2113 #if !defined(VMA_MEMORY_PRIORITY)
2114 #if VK_EXT_memory_priority
2115 #define VMA_MEMORY_PRIORITY 1
2117 #define VMA_MEMORY_PRIORITY 0
2122 #if !defined(VMA_EXTERNAL_MEMORY)
2123 #if VK_KHR_external_memory
2124 #define VMA_EXTERNAL_MEMORY 1
2126 #define VMA_EXTERNAL_MEMORY 0
2135 #ifndef VMA_CALL_PRE
2136 #define VMA_CALL_PRE
2138 #ifndef VMA_CALL_POST
2139 #define VMA_CALL_POST
2153 #ifndef VMA_LEN_IF_NOT_NULL
2154 #define VMA_LEN_IF_NOT_NULL(len)
2159 #ifndef VMA_NULLABLE
2161 #define VMA_NULLABLE _Nullable
2163 #define VMA_NULLABLE
2169 #ifndef VMA_NOT_NULL
2171 #define VMA_NOT_NULL _Nonnull
2173 #define VMA_NOT_NULL
2179 #ifndef VMA_NOT_NULL_NON_DISPATCHABLE
2180 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2181 #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
2183 #define VMA_NOT_NULL_NON_DISPATCHABLE
2187 #ifndef VMA_NULLABLE_NON_DISPATCHABLE
2188 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2189 #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
2191 #define VMA_NULLABLE_NON_DISPATCHABLE
2209 uint32_t memoryType,
2210 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2212 void* VMA_NULLABLE pUserData);
2216 uint32_t memoryType,
2217 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2219 void* VMA_NULLABLE pUserData);
2376 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
2377 PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
2378 PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
2380 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
2381 PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
2382 PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
2384 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
2385 PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
2475 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(
"VkPhysicalDeviceMemoryProperties::memoryHeapCount")
pHeapSizeLimit;
2503 #if VMA_EXTERNAL_MEMORY
2560 const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);
2568 const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);
2578 uint32_t memoryTypeIndex,
2579 VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);
2591 uint32_t frameIndex);
2687 #ifndef VMA_STATS_STRING_ENABLED
2688 #define VMA_STATS_STRING_ENABLED 1
2691 #if VMA_STATS_STRING_ENABLED
2698 char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
2699 VkBool32 detailedMap);
2703 char* VMA_NULLABLE pStatsString);
2964 uint32_t memoryTypeBits,
2966 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2982 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
2984 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
3000 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
3002 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
3169 VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);
3197 size_t* VMA_NULLABLE pLostAllocationCount);
3224 const char* VMA_NULLABLE * VMA_NOT_NULL ppName);
3234 const char* VMA_NULLABLE pName);
3328 const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
3354 const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
3356 size_t allocationCount,
3357 VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3358 VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);
3368 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3376 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3401 size_t allocationCount,
3402 const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);
3459 void* VMA_NULLABLE pUserData);
3516 void* VMA_NULLABLE * VMA_NOT_NULL ppData);
3554 VkDeviceSize offset,
3581 VkDeviceSize offset,
3600 uint32_t allocationCount,
3601 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3602 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3603 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3621 uint32_t allocationCount,
3622 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3623 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3624 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3737 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE
memory;
3875 const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3876 size_t allocationCount,
3877 VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
3896 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);
3911 VkDeviceSize allocationLocalOffset,
3912 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3913 const void* VMA_NULLABLE pNext);
3930 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);
3945 VkDeviceSize allocationLocalOffset,
3946 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3947 const void* VMA_NULLABLE pNext);
3981 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
3983 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
3995 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
3997 VkDeviceSize minAlignment,
3998 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
4015 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
4021 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
4023 VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
4040 VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
4050 #if defined(__cplusplus) && defined(__INTELLISENSE__)
4051 #define VMA_IMPLEMENTATION
4054 #ifdef VMA_IMPLEMENTATION
4055 #undef VMA_IMPLEMENTATION
4062 #if VMA_RECORDING_ENABLED
4065 #include <windows.h>
4085 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
4086 #define VMA_STATIC_VULKAN_FUNCTIONS 1
4095 #if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
4096 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
4097 #if defined(VK_NO_PROTOTYPES)
4098 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
4099 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
4112 #if VMA_USE_STL_CONTAINERS
4113 #define VMA_USE_STL_VECTOR 1
4114 #define VMA_USE_STL_UNORDERED_MAP 1
4115 #define VMA_USE_STL_LIST 1
4118 #ifndef VMA_USE_STL_SHARED_MUTEX
4120 #if __cplusplus >= 201703L
4121 #define VMA_USE_STL_SHARED_MUTEX 1
4125 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
4126 #define VMA_USE_STL_SHARED_MUTEX 1
4128 #define VMA_USE_STL_SHARED_MUTEX 0
4136 #if VMA_USE_STL_VECTOR
4140 #if VMA_USE_STL_UNORDERED_MAP
4141 #include <unordered_map>
4144 #if VMA_USE_STL_LIST
4153 #include <algorithm>
4158 #define VMA_NULL nullptr
4161 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
4163 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4166 if(alignment <
sizeof(
void*))
4168 alignment =
sizeof(
void*);
4171 return memalign(alignment, size);
4173 #elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
4176 #if defined(__APPLE__)
4177 #include <AvailabilityMacros.h>
4180 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4198 if(alignment <
sizeof(
void*))
4200 alignment =
sizeof(
void*);
4204 if(posix_memalign(&pointer, alignment, size) == 0)
4208 #elif defined(_WIN32)
4209 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4211 return _aligned_malloc(size, alignment);
4214 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4216 return aligned_alloc(alignment, size);
4221 static void vma_aligned_free(
void* ptr)
4226 static void vma_aligned_free(
void* VMA_NULLABLE ptr)
4240 #define VMA_ASSERT(expr)
4242 #define VMA_ASSERT(expr) assert(expr)
4248 #ifndef VMA_HEAVY_ASSERT
4250 #define VMA_HEAVY_ASSERT(expr)
4252 #define VMA_HEAVY_ASSERT(expr)
4256 #ifndef VMA_ALIGN_OF
4257 #define VMA_ALIGN_OF(type) (__alignof(type))
4260 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
4261 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
4264 #ifndef VMA_SYSTEM_ALIGNED_FREE
4266 #if defined(VMA_SYSTEM_FREE)
4267 #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr)
4269 #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr)
4274 #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
4278 #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
4282 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
4286 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
4289 #ifndef VMA_DEBUG_LOG
4290 #define VMA_DEBUG_LOG(format, ...)
4300 #if VMA_STATS_STRING_ENABLED
4301 static inline void VmaUint32ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint32_t num)
4303 snprintf(outStr, strLen,
"%u",
static_cast<unsigned int>(num));
4305 static inline void VmaUint64ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint64_t num)
4307 snprintf(outStr, strLen,
"%llu",
static_cast<unsigned long long>(num));
4309 static inline void VmaPtrToStr(
char* VMA_NOT_NULL outStr,
size_t strLen,
const void* ptr)
4311 snprintf(outStr, strLen,
"%p", ptr);
4319 void Lock() { m_Mutex.lock(); }
4320 void Unlock() { m_Mutex.unlock(); }
4321 bool TryLock() {
return m_Mutex.try_lock(); }
4325 #define VMA_MUTEX VmaMutex
4329 #ifndef VMA_RW_MUTEX
4330 #if VMA_USE_STL_SHARED_MUTEX
4332 #include <shared_mutex>
4336 void LockRead() { m_Mutex.lock_shared(); }
4337 void UnlockRead() { m_Mutex.unlock_shared(); }
4338 bool TryLockRead() {
return m_Mutex.try_lock_shared(); }
4339 void LockWrite() { m_Mutex.lock(); }
4340 void UnlockWrite() { m_Mutex.unlock(); }
4341 bool TryLockWrite() {
return m_Mutex.try_lock(); }
4343 std::shared_mutex m_Mutex;
4345 #define VMA_RW_MUTEX VmaRWMutex
4346 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
4352 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
4353 void LockRead() { AcquireSRWLockShared(&m_Lock); }
4354 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
4355 bool TryLockRead() {
return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
4356 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
4357 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
4358 bool TryLockWrite() {
return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
4362 #define VMA_RW_MUTEX VmaRWMutex
4368 void LockRead() { m_Mutex.Lock(); }
4369 void UnlockRead() { m_Mutex.Unlock(); }
4370 bool TryLockRead() {
return m_Mutex.TryLock(); }
4371 void LockWrite() { m_Mutex.Lock(); }
4372 void UnlockWrite() { m_Mutex.Unlock(); }
4373 bool TryLockWrite() {
return m_Mutex.TryLock(); }
4377 #define VMA_RW_MUTEX VmaRWMutex
4384 #ifndef VMA_ATOMIC_UINT32
4386 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
4389 #ifndef VMA_ATOMIC_UINT64
4391 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
4394 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
4399 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
4402 #ifndef VMA_MIN_ALIGNMENT
4407 #ifdef VMA_DEBUG_ALIGNMENT
4408 #define VMA_MIN_ALIGNMENT VMA_DEBUG_ALIGNMENT
4410 #define VMA_MIN_ALIGNMENT (1)
4414 #ifndef VMA_DEBUG_MARGIN
4419 #define VMA_DEBUG_MARGIN (0)
4422 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
4427 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
4430 #ifndef VMA_DEBUG_DETECT_CORRUPTION
4436 #define VMA_DEBUG_DETECT_CORRUPTION (0)
4439 #ifndef VMA_DEBUG_GLOBAL_MUTEX
4444 #define VMA_DEBUG_GLOBAL_MUTEX (0)
4447 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
4452 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
4455 #ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
4460 #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0)
4463 #ifndef VMA_SMALL_HEAP_MAX_SIZE
4465 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
4468 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
4470 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
4473 #ifndef VMA_CLASS_NO_COPY
4474 #define VMA_CLASS_NO_COPY(className) \
4476 className(const className&) = delete; \
4477 className& operator=(const className&) = delete;
4480 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
4483 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
4485 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
4486 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
4494 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
4495 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
4496 static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
4498 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
4500 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
4501 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count).
// Uses Kernighan's trick: each iteration clears the lowest set bit,
// so the loop runs once per set bit.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1; // Clear the lowest set bit.
        ++count;
    }
    return count;
}
// Returns true if x is a power of two. Note: x == 0 also yields true,
// which is intentional for the alignment checks that call this.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return ((x - 1) & x) == 0;
}
// Rounds val up to the nearest multiple of alignment.
// alignment must be a power of two.
template <typename T>
static inline T VmaAlignUp(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    const T mask = alignment - 1;
    return (val + mask) & ~mask;
}
// Rounds val down to the nearest multiple of alignment.
// alignment must be a power of two.
template <typename T>
static inline T VmaAlignDown(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    const T mask = alignment - 1;
    return val & ~mask;
}
// Integer division of x by y with rounding to nearest (ties round up for
// non-negative operands).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / (T)2;
    return (x + halfDivisor) / y;
}
4550 static inline uint32_t VmaNextPow2(uint32_t v)
4561 static inline uint64_t VmaNextPow2(uint64_t v)
4575 static inline uint32_t VmaPrevPow2(uint32_t v)
4585 static inline uint64_t VmaPrevPow2(uint64_t v)
4597 static inline bool VmaStrIsEmpty(
const char* pStr)
4599 return pStr == VMA_NULL || *pStr ==
'\0';
4602 #if VMA_STATS_STRING_ENABLED
4604 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto-style partition step for VmaQuickSort: uses the last element of
// [beg, end) as the pivot, moves all elements that compare less than the
// pivot in front of it, and returns an iterator to the pivot's final slot.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator storePos = beg;
    for(Iterator curr = beg; curr < pivot; ++curr)
    {
        if(cmp(*curr, *pivot))
        {
            if(storePos != curr)
            {
                VMA_SWAP(*curr, *storePos);
            }
            ++storePos;
        }
    }
    if(storePos != pivot)
    {
        VMA_SWAP(*storePos, *pivot);
    }
    return storePos;
}
4647 template<
typename Iterator,
typename Compare>
4648 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
4652 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
4653 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
4654 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
4658 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
4669 static inline bool VmaBlocksOnSamePage(
4670 VkDeviceSize resourceAOffset,
4671 VkDeviceSize resourceASize,
4672 VkDeviceSize resourceBOffset,
4673 VkDeviceSize pageSize)
4675 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
4676 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
4677 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
4678 VkDeviceSize resourceBStart = resourceBOffset;
4679 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
4680 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation. The numeric order matters:
// VmaIsBufferImageGranularityConflict relies on it to normalize its
// argument pair before checking for page-sharing conflicts.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // Unused region.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // Allocated but resource kind unknown.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // Image with unknown tiling.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
4700 static inline bool VmaIsBufferImageGranularityConflict(
4701 VmaSuballocationType suballocType1,
4702 VmaSuballocationType suballocType2)
4704 if(suballocType1 > suballocType2)
4706 VMA_SWAP(suballocType1, suballocType2);
4709 switch(suballocType1)
4711 case VMA_SUBALLOCATION_TYPE_FREE:
4713 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
4715 case VMA_SUBALLOCATION_TYPE_BUFFER:
4717 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4718 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4719 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
4721 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4722 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
4723 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4724 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
4726 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4727 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
4735 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
4737 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4738 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
4739 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4740 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
4742 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
4749 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
4751 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4752 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
4753 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4754 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
4756 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
4769 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
4771 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
4772 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4773 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4774 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
4780 VMA_CLASS_NO_COPY(VmaMutexLock)
4782 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
4783 m_pMutex(useMutex ? &mutex : VMA_NULL)
4784 {
if(m_pMutex) { m_pMutex->Lock(); } }
4786 {
if(m_pMutex) { m_pMutex->Unlock(); } }
4788 VMA_MUTEX* m_pMutex;
4792 struct VmaMutexLockRead
4794 VMA_CLASS_NO_COPY(VmaMutexLockRead)
4796 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
4797 m_pMutex(useMutex ? &mutex : VMA_NULL)
4798 {
if(m_pMutex) { m_pMutex->LockRead(); } }
4799 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
4801 VMA_RW_MUTEX* m_pMutex;
4805 struct VmaMutexLockWrite
4807 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
4809 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
4810 m_pMutex(useMutex ? &mutex : VMA_NULL)
4811 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
4812 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
4814 VMA_RW_MUTEX* m_pMutex;
4817 #if VMA_DEBUG_GLOBAL_MUTEX
4818 static VMA_MUTEX gDebugGlobalMutex;
4819 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
4821 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
4825 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to the
// first element NOT less than key according to cmp (i.e. std::lower_bound
// semantics), or end if every element is less.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t lo = 0, hi = (end - beg);
    while(lo < hi)
    {
        const size_t mid = lo + (hi - lo) / 2; // Overflow-safe midpoint.
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1;
        }
        else
        {
            hi = mid;
        }
    }
    return beg + lo;
}
4855 template<
typename CmpLess,
typename IterT,
typename KeyT>
4856 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
4858 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4859 beg, end, value, cmp);
4861 (!cmp(*it, value) && !cmp(value, *it)))
4873 template<
typename T>
4874 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
4876 for(uint32_t i = 0; i < count; ++i)
4878 const T iPtr = arr[i];
4879 if(iPtr == VMA_NULL)
4883 for(uint32_t j = i + 1; j < count; ++j)
// Inserts newStruct at the head of mainStruct's pNext extension chain,
// preserving whatever chain was already attached.
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
4904 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
4906 void* result = VMA_NULL;
4907 if((pAllocationCallbacks != VMA_NULL) &&
4908 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
4910 result = (*pAllocationCallbacks->pfnAllocation)(
4911 pAllocationCallbacks->pUserData,
4914 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
4918 result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
4920 VMA_ASSERT(result != VMA_NULL &&
"CPU memory allocation failed.");
4924 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
4926 if((pAllocationCallbacks != VMA_NULL) &&
4927 (pAllocationCallbacks->pfnFree != VMA_NULL))
4929 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
4933 VMA_SYSTEM_ALIGNED_FREE(ptr);
4937 template<
typename T>
4938 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
4940 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
4943 template<
typename T>
4944 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
4946 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
4949 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
4951 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
4953 template<
typename T>
4954 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
4957 VmaFree(pAllocationCallbacks, ptr);
4960 template<
typename T>
4961 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
4965 for(
size_t i = count; i--; )
4969 VmaFree(pAllocationCallbacks, ptr);
4973 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr)
4975 if(srcStr != VMA_NULL)
4977 const size_t len = strlen(srcStr);
4978 char*
const result = vma_new_array(allocs,
char, len + 1);
4979 memcpy(result, srcStr, len + 1);
4988 static void VmaFreeString(
const VkAllocationCallbacks* allocs,
char* str)
4992 const size_t len = strlen(str);
4993 vma_delete_array(allocs, str, len + 1);
4998 template<
typename T>
4999 class VmaStlAllocator
5002 const VkAllocationCallbacks*
const m_pCallbacks;
5003 typedef T value_type;
5005 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
5006 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
5008 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
5009 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
5011 template<
typename U>
5012 bool operator==(
const VmaStlAllocator<U>& rhs)
const
5014 return m_pCallbacks == rhs.m_pCallbacks;
5016 template<
typename U>
5017 bool operator!=(
const VmaStlAllocator<U>& rhs)
const
5019 return m_pCallbacks != rhs.m_pCallbacks;
5022 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
5023 VmaStlAllocator(
const VmaStlAllocator&) =
default;
5026 #if VMA_USE_STL_VECTOR
5028 #define VmaVector std::vector
5030 template<
typename T,
typename allocatorT>
5031 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
5033 vec.insert(vec.begin() + index, item);
5036 template<
typename T,
typename allocatorT>
5037 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
5039 vec.erase(vec.begin() + index);
5047 template<
typename T,
typename AllocatorT>
5051 typedef T value_type;
5053 VmaVector(
const AllocatorT& allocator) :
5054 m_Allocator(allocator),
5061 VmaVector(
size_t count,
const AllocatorT& allocator) :
5062 m_Allocator(allocator),
5063 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
5071 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator)
5072 : VmaVector(count, allocator) {}
5074 VmaVector(
const VmaVector<T, AllocatorT>& src) :
5075 m_Allocator(src.m_Allocator),
5076 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
5077 m_Count(src.m_Count),
5078 m_Capacity(src.m_Count)
5082 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
5088 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5091 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
5095 resize(rhs.m_Count);
5098 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
5104 bool empty()
const {
return m_Count == 0; }
5105 size_t size()
const {
return m_Count; }
5106 T* data() {
return m_pArray; }
5107 const T* data()
const {
return m_pArray; }
5109 T& operator[](
size_t index)
5111 VMA_HEAVY_ASSERT(index < m_Count);
5112 return m_pArray[index];
5114 const T& operator[](
size_t index)
const
5116 VMA_HEAVY_ASSERT(index < m_Count);
5117 return m_pArray[index];
5122 VMA_HEAVY_ASSERT(m_Count > 0);
5125 const T& front()
const
5127 VMA_HEAVY_ASSERT(m_Count > 0);
5132 VMA_HEAVY_ASSERT(m_Count > 0);
5133 return m_pArray[m_Count - 1];
5135 const T& back()
const
5137 VMA_HEAVY_ASSERT(m_Count > 0);
5138 return m_pArray[m_Count - 1];
5141 void reserve(
size_t newCapacity,
bool freeMemory =
false)
5143 newCapacity = VMA_MAX(newCapacity, m_Count);
5145 if((newCapacity < m_Capacity) && !freeMemory)
5147 newCapacity = m_Capacity;
5150 if(newCapacity != m_Capacity)
5152 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
5155 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5157 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5158 m_Capacity = newCapacity;
5159 m_pArray = newArray;
5163 void resize(
size_t newCount)
5165 size_t newCapacity = m_Capacity;
5166 if(newCount > m_Capacity)
5168 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
5171 if(newCapacity != m_Capacity)
5173 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
5174 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
5175 if(elementsToCopy != 0)
5177 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
5179 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5180 m_Capacity = newCapacity;
5181 m_pArray = newArray;
5192 void shrink_to_fit()
5194 if(m_Capacity > m_Count)
5196 T* newArray = VMA_NULL;
5199 newArray = VmaAllocateArray<T>(m_Allocator.m_pCallbacks, m_Count);
5200 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5202 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5203 m_Capacity = m_Count;
5204 m_pArray = newArray;
5208 void insert(
size_t index,
const T& src)
5210 VMA_HEAVY_ASSERT(index <= m_Count);
5211 const size_t oldCount = size();
5212 resize(oldCount + 1);
5213 if(index < oldCount)
5215 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
5217 m_pArray[index] = src;
5220 void remove(
size_t index)
5222 VMA_HEAVY_ASSERT(index < m_Count);
5223 const size_t oldCount = size();
5224 if(index < oldCount - 1)
5226 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
5228 resize(oldCount - 1);
5231 void push_back(
const T& src)
5233 const size_t newIndex = size();
5234 resize(newIndex + 1);
5235 m_pArray[newIndex] = src;
5240 VMA_HEAVY_ASSERT(m_Count > 0);
5244 void push_front(
const T& src)
5251 VMA_HEAVY_ASSERT(m_Count > 0);
5255 typedef T* iterator;
5256 typedef const T* const_iterator;
5258 iterator begin() {
return m_pArray; }
5259 iterator end() {
return m_pArray + m_Count; }
5260 const_iterator cbegin()
const {
return m_pArray; }
5261 const_iterator cend()
const {
return m_pArray + m_Count; }
5262 const_iterator begin()
const {
return cbegin(); }
5263 const_iterator end()
const {
return cend(); }
5266 AllocatorT m_Allocator;
5272 template<
typename T,
typename allocatorT>
5273 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
5275 vec.insert(index, item);
5278 template<
typename T,
typename allocatorT>
5279 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
// Inserts value into a vector kept sorted by CmpLess, preserving order,
// and returns the index at which it was inserted.
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t insertionIndex = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, insertionIndex, value);
    return insertionIndex;
}
// Removes one element equivalent to value (per CmpLess) from a sorted
// vector. Returns true if an element was found and removed, false otherwise.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
5327 template<
typename T,
typename AllocatorT,
size_t N>
5328 class VmaSmallVector
5331 typedef T value_type;
5333 VmaSmallVector(
const AllocatorT& allocator) :
5335 m_DynamicArray(allocator)
5338 VmaSmallVector(
size_t count,
const AllocatorT& allocator) :
5340 m_DynamicArray(count > N ? count : 0, allocator)
5343 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5344 VmaSmallVector(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) =
delete;
5345 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5346 VmaSmallVector<T, AllocatorT, N>& operator=(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) =
delete;
5348 bool empty()
const {
return m_Count == 0; }
5349 size_t size()
const {
return m_Count; }
5350 T* data() {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5351 const T* data()
const {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5353 T& operator[](
size_t index)
5355 VMA_HEAVY_ASSERT(index < m_Count);
5356 return data()[index];
5358 const T& operator[](
size_t index)
const
5360 VMA_HEAVY_ASSERT(index < m_Count);
5361 return data()[index];
5366 VMA_HEAVY_ASSERT(m_Count > 0);
5369 const T& front()
const
5371 VMA_HEAVY_ASSERT(m_Count > 0);
5376 VMA_HEAVY_ASSERT(m_Count > 0);
5377 return data()[m_Count - 1];
5379 const T& back()
const
5381 VMA_HEAVY_ASSERT(m_Count > 0);
5382 return data()[m_Count - 1];
5385 void resize(
size_t newCount,
bool freeMemory =
false)
5387 if(newCount > N && m_Count > N)
5390 m_DynamicArray.resize(newCount);
5393 m_DynamicArray.shrink_to_fit();
5396 else if(newCount > N && m_Count <= N)
5399 m_DynamicArray.resize(newCount);
5402 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count *
sizeof(T));
5405 else if(newCount <= N && m_Count > N)
5410 memcpy(m_StaticArray, m_DynamicArray.data(), newCount *
sizeof(T));
5412 m_DynamicArray.resize(0);
5415 m_DynamicArray.shrink_to_fit();
5425 void clear(
bool freeMemory =
false)
5427 m_DynamicArray.clear();
5430 m_DynamicArray.shrink_to_fit();
5435 void insert(
size_t index,
const T& src)
5437 VMA_HEAVY_ASSERT(index <= m_Count);
5438 const size_t oldCount = size();
5439 resize(oldCount + 1);
5440 T*
const dataPtr = data();
5441 if(index < oldCount)
5444 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) *
sizeof(T));
5446 dataPtr[index] = src;
5449 void remove(
size_t index)
5451 VMA_HEAVY_ASSERT(index < m_Count);
5452 const size_t oldCount = size();
5453 if(index < oldCount - 1)
5456 T*
const dataPtr = data();
5457 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) *
sizeof(T));
5459 resize(oldCount - 1);
5462 void push_back(
const T& src)
5464 const size_t newIndex = size();
5465 resize(newIndex + 1);
5466 data()[newIndex] = src;
5471 VMA_HEAVY_ASSERT(m_Count > 0);
5475 void push_front(
const T& src)
5482 VMA_HEAVY_ASSERT(m_Count > 0);
5486 typedef T* iterator;
5488 iterator begin() {
return data(); }
5489 iterator end() {
return data() + m_Count; }
5494 VmaVector<T, AllocatorT> m_DynamicArray;
5505 template<
typename T>
5506 class VmaPoolAllocator
5508 VMA_CLASS_NO_COPY(VmaPoolAllocator)
5510 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
5511 ~VmaPoolAllocator();
5512 template<
typename... Types> T* Alloc(Types... args);
5518 uint32_t NextFreeIndex;
5519 alignas(T)
char Value[
sizeof(T)];
5526 uint32_t FirstFreeIndex;
5529 const VkAllocationCallbacks* m_pAllocationCallbacks;
5530 const uint32_t m_FirstBlockCapacity;
5531 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
5533 ItemBlock& CreateNewBlock();
5536 template<
typename T>
5537 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
5538 m_pAllocationCallbacks(pAllocationCallbacks),
5539 m_FirstBlockCapacity(firstBlockCapacity),
5540 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
5542 VMA_ASSERT(m_FirstBlockCapacity > 1);
5545 template<
typename T>
5546 VmaPoolAllocator<T>::~VmaPoolAllocator()
5548 for(
size_t i = m_ItemBlocks.size(); i--; )
5549 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
5550 m_ItemBlocks.clear();
5553 template<
typename T>
5554 template<
typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
5556 for(
size_t i = m_ItemBlocks.size(); i--; )
5558 ItemBlock& block = m_ItemBlocks[i];
5560 if(block.FirstFreeIndex != UINT32_MAX)
5562 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
5563 block.FirstFreeIndex = pItem->NextFreeIndex;
5564 T* result = (T*)&pItem->Value;
5565 new(result)T(std::forward<Types>(args)...);
5571 ItemBlock& newBlock = CreateNewBlock();
5572 Item*
const pItem = &newBlock.pItems[0];
5573 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
5574 T* result = (T*)&pItem->Value;
5575 new(result)T(std::forward<Types>(args)...);
5579 template<
typename T>
5580 void VmaPoolAllocator<T>::Free(T* ptr)
5583 for(
size_t i = m_ItemBlocks.size(); i--; )
5585 ItemBlock& block = m_ItemBlocks[i];
5589 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
5592 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
5595 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
5596 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
5597 block.FirstFreeIndex = index;
5601 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
5604 template<
typename T>
5605 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
5607 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
5608 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
5610 const ItemBlock newBlock = {
5611 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
5615 m_ItemBlocks.push_back(newBlock);
5618 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
5619 newBlock.pItems[i].NextFreeIndex = i + 1;
5620 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
5621 return m_ItemBlocks.back();
5627 #if VMA_USE_STL_LIST
5629 #define VmaList std::list
5633 template<
typename T>
5642 template<
typename T>
5645 VMA_CLASS_NO_COPY(VmaRawList)
5647 typedef VmaListItem<T> ItemType;
5649 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
5653 size_t GetCount()
const {
return m_Count; }
5654 bool IsEmpty()
const {
return m_Count == 0; }
5656 ItemType* Front() {
return m_pFront; }
5657 const ItemType* Front()
const {
return m_pFront; }
5658 ItemType* Back() {
return m_pBack; }
5659 const ItemType* Back()
const {
return m_pBack; }
5661 ItemType* PushBack();
5662 ItemType* PushFront();
5663 ItemType* PushBack(
const T& value);
5664 ItemType* PushFront(
const T& value);
5669 ItemType* InsertBefore(ItemType* pItem);
5671 ItemType* InsertAfter(ItemType* pItem);
5673 ItemType* InsertBefore(ItemType* pItem,
const T& value);
5674 ItemType* InsertAfter(ItemType* pItem,
const T& value);
5676 void Remove(ItemType* pItem);
5679 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
5680 VmaPoolAllocator<ItemType> m_ItemAllocator;
5686 template<
typename T>
5687 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
5688 m_pAllocationCallbacks(pAllocationCallbacks),
5689 m_ItemAllocator(pAllocationCallbacks, 128),
5696 template<
typename T>
5697 VmaRawList<T>::~VmaRawList()
5703 template<
typename T>
5704 void VmaRawList<T>::Clear()
5706 if(IsEmpty() ==
false)
5708 ItemType* pItem = m_pBack;
5709 while(pItem != VMA_NULL)
5711 ItemType*
const pPrevItem = pItem->pPrev;
5712 m_ItemAllocator.Free(pItem);
5715 m_pFront = VMA_NULL;
5721 template<
typename T>
5722 VmaListItem<T>* VmaRawList<T>::PushBack()
5724 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5725 pNewItem->pNext = VMA_NULL;
5728 pNewItem->pPrev = VMA_NULL;
5729 m_pFront = pNewItem;
5735 pNewItem->pPrev = m_pBack;
5736 m_pBack->pNext = pNewItem;
5743 template<
typename T>
5744 VmaListItem<T>* VmaRawList<T>::PushFront()
5746 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5747 pNewItem->pPrev = VMA_NULL;
5750 pNewItem->pNext = VMA_NULL;
5751 m_pFront = pNewItem;
5757 pNewItem->pNext = m_pFront;
5758 m_pFront->pPrev = pNewItem;
5759 m_pFront = pNewItem;
5765 template<
typename T>
5766 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
5768 ItemType*
const pNewItem = PushBack();
5769 pNewItem->Value = value;
5773 template<
typename T>
5774 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
5776 ItemType*
const pNewItem = PushFront();
5777 pNewItem->Value = value;
5781 template<
typename T>
5782 void VmaRawList<T>::PopBack()
5784 VMA_HEAVY_ASSERT(m_Count > 0);
5785 ItemType*
const pBackItem = m_pBack;
5786 ItemType*
const pPrevItem = pBackItem->pPrev;
5787 if(pPrevItem != VMA_NULL)
5789 pPrevItem->pNext = VMA_NULL;
5791 m_pBack = pPrevItem;
5792 m_ItemAllocator.Free(pBackItem);
5796 template<
typename T>
5797 void VmaRawList<T>::PopFront()
5799 VMA_HEAVY_ASSERT(m_Count > 0);
5800 ItemType*
const pFrontItem = m_pFront;
5801 ItemType*
const pNextItem = pFrontItem->pNext;
5802 if(pNextItem != VMA_NULL)
5804 pNextItem->pPrev = VMA_NULL;
5806 m_pFront = pNextItem;
5807 m_ItemAllocator.Free(pFrontItem);
5811 template<
typename T>
5812 void VmaRawList<T>::Remove(ItemType* pItem)
5814 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
5815 VMA_HEAVY_ASSERT(m_Count > 0);
5817 if(pItem->pPrev != VMA_NULL)
5819 pItem->pPrev->pNext = pItem->pNext;
5823 VMA_HEAVY_ASSERT(m_pFront == pItem);
5824 m_pFront = pItem->pNext;
5827 if(pItem->pNext != VMA_NULL)
5829 pItem->pNext->pPrev = pItem->pPrev;
5833 VMA_HEAVY_ASSERT(m_pBack == pItem);
5834 m_pBack = pItem->pPrev;
5837 m_ItemAllocator.Free(pItem);
5841 template<
typename T>
5842 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
5844 if(pItem != VMA_NULL)
5846 ItemType*
const prevItem = pItem->pPrev;
5847 ItemType*
const newItem = m_ItemAllocator.Alloc();
5848 newItem->pPrev = prevItem;
5849 newItem->pNext = pItem;
5850 pItem->pPrev = newItem;
5851 if(prevItem != VMA_NULL)
5853 prevItem->pNext = newItem;
5857 VMA_HEAVY_ASSERT(m_pFront == pItem);
5867 template<
typename T>
5868 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
5870 if(pItem != VMA_NULL)
5872 ItemType*
const nextItem = pItem->pNext;
5873 ItemType*
const newItem = m_ItemAllocator.Alloc();
5874 newItem->pNext = nextItem;
5875 newItem->pPrev = pItem;
5876 pItem->pNext = newItem;
5877 if(nextItem != VMA_NULL)
5879 nextItem->pPrev = newItem;
5883 VMA_HEAVY_ASSERT(m_pBack == pItem);
5893 template<
typename T>
5894 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
5896 ItemType*
const newItem = InsertBefore(pItem);
5897 newItem->Value = value;
5901 template<
typename T>
5902 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
5904 ItemType*
const newItem = InsertAfter(pItem);
5905 newItem->Value = value;
5909 template<
typename T,
typename AllocatorT>
5912 VMA_CLASS_NO_COPY(VmaList)
5923 T& operator*()
const
5925 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5926 return m_pItem->Value;
5928 T* operator->()
const
5930 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5931 return &m_pItem->Value;
5934 iterator& operator++()
5936 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5937 m_pItem = m_pItem->pNext;
5940 iterator& operator--()
5942 if(m_pItem != VMA_NULL)
5944 m_pItem = m_pItem->pPrev;
5948 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5949 m_pItem = m_pList->Back();
5954 iterator operator++(
int)
5956 iterator result = *
this;
5960 iterator operator--(
int)
5962 iterator result = *
this;
5967 bool operator==(
const iterator& rhs)
const
5969 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5970 return m_pItem == rhs.m_pItem;
5972 bool operator!=(
const iterator& rhs)
const
5974 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5975 return m_pItem != rhs.m_pItem;
5979 VmaRawList<T>* m_pList;
5980 VmaListItem<T>* m_pItem;
5982 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
5988 friend class VmaList<T, AllocatorT>;
5991 class const_iterator
6000 const_iterator(
const iterator& src) :
6001 m_pList(src.m_pList),
6002 m_pItem(src.m_pItem)
6006 const T& operator*()
const
6008 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
6009 return m_pItem->Value;
6011 const T* operator->()
const
6013 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
6014 return &m_pItem->Value;
6017 const_iterator& operator++()
6019 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
6020 m_pItem = m_pItem->pNext;
6023 const_iterator& operator--()
6025 if(m_pItem != VMA_NULL)
6027 m_pItem = m_pItem->pPrev;
6031 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
6032 m_pItem = m_pList->Back();
6037 const_iterator operator++(
int)
6039 const_iterator result = *
this;
6043 const_iterator operator--(
int)
6045 const_iterator result = *
this;
6050 bool operator==(
const const_iterator& rhs)
const
6052 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
6053 return m_pItem == rhs.m_pItem;
6055 bool operator!=(
const const_iterator& rhs)
const
6057 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
6058 return m_pItem != rhs.m_pItem;
6062 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
6068 const VmaRawList<T>* m_pList;
6069 const VmaListItem<T>* m_pItem;
6071 friend class VmaList<T, AllocatorT>;
6074 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
6076 bool empty()
const {
return m_RawList.IsEmpty(); }
6077 size_t size()
const {
return m_RawList.GetCount(); }
6079 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
6080 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
6082 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
6083 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
6085 const_iterator begin()
const {
return cbegin(); }
6086 const_iterator end()
const {
return cend(); }
6088 void clear() { m_RawList.Clear(); }
6089 void push_back(
const T& value) { m_RawList.PushBack(value); }
6090 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
6091 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
6094 VmaRawList<T> m_RawList;
6113 template<
typename ItemTypeTraits>
6114 class VmaIntrusiveLinkedList
6117 typedef typename ItemTypeTraits::ItemType ItemType;
6118 static ItemType* GetPrev(
const ItemType* item) {
return ItemTypeTraits::GetPrev(item); }
6119 static ItemType* GetNext(
const ItemType* item) {
return ItemTypeTraits::GetNext(item); }
6121 VmaIntrusiveLinkedList() { }
6122 VmaIntrusiveLinkedList(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
6123 VmaIntrusiveLinkedList(VmaIntrusiveLinkedList<ItemTypeTraits>&& src) :
6124 m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count)
6126 src.m_Front = src.m_Back = VMA_NULL;
6129 ~VmaIntrusiveLinkedList()
6131 VMA_HEAVY_ASSERT(IsEmpty());
6133 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
6134 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(VmaIntrusiveLinkedList<ItemTypeTraits>&& src)
6138 VMA_HEAVY_ASSERT(IsEmpty());
6139 m_Front = src.m_Front;
6140 m_Back = src.m_Back;
6141 m_Count = src.m_Count;
6142 src.m_Front = src.m_Back = VMA_NULL;
6151 ItemType* item = m_Back;
6152 while(item != VMA_NULL)
6154 ItemType*
const prevItem = ItemTypeTraits::AccessPrev(item);
6155 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
6156 ItemTypeTraits::AccessNext(item) = VMA_NULL;
6164 size_t GetCount()
const {
return m_Count; }
6165 bool IsEmpty()
const {
return m_Count == 0; }
6166 ItemType* Front() {
return m_Front; }
6167 const ItemType* Front()
const {
return m_Front; }
6168 ItemType* Back() {
return m_Back; }
6169 const ItemType* Back()
const {
return m_Back; }
6170 void PushBack(ItemType* item)
6172 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
6181 ItemTypeTraits::AccessPrev(item) = m_Back;
6182 ItemTypeTraits::AccessNext(m_Back) = item;
6187 void PushFront(ItemType* item)
6189 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
6198 ItemTypeTraits::AccessNext(item) = m_Front;
6199 ItemTypeTraits::AccessPrev(m_Front) = item;
6206 VMA_HEAVY_ASSERT(m_Count > 0);
6207 ItemType*
const backItem = m_Back;
6208 ItemType*
const prevItem = ItemTypeTraits::GetPrev(backItem);
6209 if(prevItem != VMA_NULL)
6211 ItemTypeTraits::AccessNext(prevItem) = VMA_NULL;
6215 ItemTypeTraits::AccessPrev(backItem) = VMA_NULL;
6216 ItemTypeTraits::AccessNext(backItem) = VMA_NULL;
6219 ItemType* PopFront()
6221 VMA_HEAVY_ASSERT(m_Count > 0);
6222 ItemType*
const frontItem = m_Front;
6223 ItemType*
const nextItem = ItemTypeTraits::GetNext(frontItem);
6224 if(nextItem != VMA_NULL)
6226 ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL;
6230 ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL;
6231 ItemTypeTraits::AccessNext(frontItem) = VMA_NULL;
6236 void InsertBefore(ItemType* existingItem, ItemType* newItem)
6238 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
6239 if(existingItem != VMA_NULL)
6241 ItemType*
const prevItem = ItemTypeTraits::GetPrev(existingItem);
6242 ItemTypeTraits::AccessPrev(newItem) = prevItem;
6243 ItemTypeTraits::AccessNext(newItem) = existingItem;
6244 ItemTypeTraits::AccessPrev(existingItem) = newItem;
6245 if(prevItem != VMA_NULL)
6247 ItemTypeTraits::AccessNext(prevItem) = newItem;
6251 VMA_HEAVY_ASSERT(m_Front == existingItem);
6260 void InsertAfter(ItemType* existingItem, ItemType* newItem)
6262 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
6263 if(existingItem != VMA_NULL)
6265 ItemType*
const nextItem = ItemTypeTraits::GetNext(existingItem);
6266 ItemTypeTraits::AccessNext(newItem) = nextItem;
6267 ItemTypeTraits::AccessPrev(newItem) = existingItem;
6268 ItemTypeTraits::AccessNext(existingItem) = newItem;
6269 if(nextItem != VMA_NULL)
6271 ItemTypeTraits::AccessPrev(nextItem) = newItem;
6275 VMA_HEAVY_ASSERT(m_Back == existingItem);
6281 return PushFront(newItem);
6283 void Remove(ItemType* item)
6285 VMA_HEAVY_ASSERT(item != VMA_NULL && m_Count > 0);
6286 if(ItemTypeTraits::GetPrev(item) != VMA_NULL)
6288 ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item);
6292 VMA_HEAVY_ASSERT(m_Front == item);
6293 m_Front = ItemTypeTraits::GetNext(item);
6296 if(ItemTypeTraits::GetNext(item) != VMA_NULL)
6298 ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item);
6302 VMA_HEAVY_ASSERT(m_Back == item);
6303 m_Back = ItemTypeTraits::GetPrev(item);
6305 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
6306 ItemTypeTraits::AccessNext(item) = VMA_NULL;
6310 ItemType* m_Front = VMA_NULL;
6311 ItemType* m_Back = VMA_NULL;
6321 #if VMA_USE_STL_UNORDERED_MAP
6323 #define VmaPair std::pair
6325 #define VMA_MAP_TYPE(KeyT, ValueT) \
6326 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
// Minimal std::pair replacement used when STL containers are disabled.
template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
6343 template<
typename KeyT,
typename ValueT>
6347 typedef VmaPair<KeyT, ValueT> PairType;
6348 typedef PairType* iterator;
6350 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
6352 iterator begin() {
return m_Vector.begin(); }
6353 iterator end() {
return m_Vector.end(); }
6355 void insert(
const PairType& pair);
6356 iterator find(
const KeyT& key);
6357 void erase(iterator it);
6360 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
6363 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
6365 template<
typename FirstT,
typename SecondT>
6366 struct VmaPairFirstLess
6368 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const
6370 return lhs.first < rhs.first;
6372 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const
6374 return lhs.first < rhsFirst;
6378 template<
typename KeyT,
typename ValueT>
6379 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
6381 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
6383 m_Vector.data() + m_Vector.size(),
6385 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
6386 VmaVectorInsert(m_Vector, indexToInsert, pair);
6389 template<
typename KeyT,
typename ValueT>
6390 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
6392 PairType* it = VmaBinaryFindFirstNotLess(
6394 m_Vector.data() + m_Vector.size(),
6396 VmaPairFirstLess<KeyT, ValueT>());
6397 if((it != m_Vector.end()) && (it->first == key))
6403 return m_Vector.end();
6407 template<
typename KeyT,
typename ValueT>
6408 void VmaMap<KeyT, ValueT>::erase(iterator it)
6410 VmaVectorRemove(m_Vector, it - m_Vector.begin());
6419 class VmaDeviceMemoryBlock;
6421 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
6423 struct VmaAllocation_T
6426 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
6430 FLAG_USER_DATA_STRING = 0x01,
6434 enum ALLOCATION_TYPE
6436 ALLOCATION_TYPE_NONE,
6437 ALLOCATION_TYPE_BLOCK,
6438 ALLOCATION_TYPE_DEDICATED,
6445 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
6448 m_pUserData{VMA_NULL},
6449 m_LastUseFrameIndex{currentFrameIndex},
6450 m_MemoryTypeIndex{0},
6451 m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
6452 m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
6454 m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
6456 #if VMA_STATS_STRING_ENABLED
6457 m_CreationFrameIndex = currentFrameIndex;
6458 m_BufferImageUsage = 0;
6464 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
6467 VMA_ASSERT(m_pUserData == VMA_NULL);
6470 void InitBlockAllocation(
6471 VmaDeviceMemoryBlock* block,
6472 VkDeviceSize offset,
6473 VkDeviceSize alignment,
6475 uint32_t memoryTypeIndex,
6476 VmaSuballocationType suballocationType,
6480 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6481 VMA_ASSERT(block != VMA_NULL);
6482 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6483 m_Alignment = alignment;
6485 m_MemoryTypeIndex = memoryTypeIndex;
6486 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6487 m_SuballocationType = (uint8_t)suballocationType;
6488 m_BlockAllocation.m_Block = block;
6489 m_BlockAllocation.m_Offset = offset;
6490 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
6495 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6496 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
6497 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6498 m_MemoryTypeIndex = 0;
6499 m_BlockAllocation.m_Block = VMA_NULL;
6500 m_BlockAllocation.m_Offset = 0;
6501 m_BlockAllocation.m_CanBecomeLost =
true;
6504 void ChangeBlockAllocation(
6506 VmaDeviceMemoryBlock* block,
6507 VkDeviceSize offset);
6509 void ChangeOffset(VkDeviceSize newOffset);
6512 void InitDedicatedAllocation(
6513 uint32_t memoryTypeIndex,
6514 VkDeviceMemory hMemory,
6515 VmaSuballocationType suballocationType,
6519 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6520 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
6521 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
6524 m_MemoryTypeIndex = memoryTypeIndex;
6525 m_SuballocationType = (uint8_t)suballocationType;
6526 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6527 m_DedicatedAllocation.m_hMemory = hMemory;
6528 m_DedicatedAllocation.m_pMappedData = pMappedData;
6529 m_DedicatedAllocation.m_Prev = VMA_NULL;
6530 m_DedicatedAllocation.m_Next = VMA_NULL;
6533 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
6534 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
6535 VkDeviceSize GetSize()
const {
return m_Size; }
6536 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
6537 void* GetUserData()
const {
return m_pUserData; }
6538 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
6539 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
6541 VmaDeviceMemoryBlock* GetBlock()
const
6543 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6544 return m_BlockAllocation.m_Block;
6546 VkDeviceSize GetOffset()
const;
6547 VkDeviceMemory GetMemory()
const;
6548 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
6549 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
6550 void* GetMappedData()
const;
6551 bool CanBecomeLost()
const;
6553 uint32_t GetLastUseFrameIndex()
const
6555 return m_LastUseFrameIndex.load();
6557 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
6559 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
6569 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6571 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
6573 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
6584 void BlockAllocMap();
6585 void BlockAllocUnmap();
6586 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
6589 #if VMA_STATS_STRING_ENABLED
6590 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
6591 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
6593 void InitBufferImageUsage(uint32_t bufferImageUsage)
6595 VMA_ASSERT(m_BufferImageUsage == 0);
6596 m_BufferImageUsage = bufferImageUsage;
6599 void PrintParameters(
class VmaJsonWriter& json)
const;
6603 VkDeviceSize m_Alignment;
6604 VkDeviceSize m_Size;
6606 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
6607 uint32_t m_MemoryTypeIndex;
6609 uint8_t m_SuballocationType;
6616 struct BlockAllocation
6618 VmaDeviceMemoryBlock* m_Block;
6619 VkDeviceSize m_Offset;
6620 bool m_CanBecomeLost;
6624 struct DedicatedAllocation
6626 VkDeviceMemory m_hMemory;
6627 void* m_pMappedData;
6628 VmaAllocation_T* m_Prev;
6629 VmaAllocation_T* m_Next;
6635 BlockAllocation m_BlockAllocation;
6637 DedicatedAllocation m_DedicatedAllocation;
6640 #if VMA_STATS_STRING_ENABLED
6641 uint32_t m_CreationFrameIndex;
6642 uint32_t m_BufferImageUsage;
6647 friend struct VmaDedicatedAllocationListItemTraits;
6650 struct VmaDedicatedAllocationListItemTraits
6652 typedef VmaAllocation_T ItemType;
6653 static ItemType* GetPrev(
const ItemType* item)
6655 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6656 return item->m_DedicatedAllocation.m_Prev;
6658 static ItemType* GetNext(
const ItemType* item)
6660 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6661 return item->m_DedicatedAllocation.m_Next;
6663 static ItemType*& AccessPrev(ItemType* item)
6665 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6666 return item->m_DedicatedAllocation.m_Prev;
6668 static ItemType*& AccessNext(ItemType* item){
6669 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6670 return item->m_DedicatedAllocation.m_Next;
6678 struct VmaSuballocation
6680 VkDeviceSize offset;
6683 VmaSuballocationType type;
6687 struct VmaSuballocationOffsetLess
6689 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6691 return lhs.offset < rhs.offset;
6694 struct VmaSuballocationOffsetGreater
6696 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6698 return lhs.offset > rhs.offset;
6702 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
6705 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
6707 enum class VmaAllocationRequestType
6729 struct VmaAllocationRequest
6731 VkDeviceSize offset;
6732 VkDeviceSize sumFreeSize;
6733 VkDeviceSize sumItemSize;
6734 VmaSuballocationList::iterator item;
6735 size_t itemsToMakeLostCount;
6737 VmaAllocationRequestType type;
6739 VkDeviceSize CalcCost()
const
6741 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
6749 class VmaBlockMetadata
6753 virtual ~VmaBlockMetadata() { }
6754 virtual void Init(VkDeviceSize size) { m_Size = size; }
6757 virtual bool Validate()
const = 0;
6758 VkDeviceSize GetSize()
const {
return m_Size; }
6759 virtual size_t GetAllocationCount()
const = 0;
6760 virtual VkDeviceSize GetSumFreeSize()
const = 0;
6761 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
6763 virtual bool IsEmpty()
const = 0;
6765 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
6767 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
6769 #if VMA_STATS_STRING_ENABLED
6770 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
6776 virtual bool CreateAllocationRequest(
6777 uint32_t currentFrameIndex,
6778 uint32_t frameInUseCount,
6779 VkDeviceSize bufferImageGranularity,
6780 VkDeviceSize allocSize,
6781 VkDeviceSize allocAlignment,
6783 VmaSuballocationType allocType,
6784 bool canMakeOtherLost,
6787 VmaAllocationRequest* pAllocationRequest) = 0;
6789 virtual bool MakeRequestedAllocationsLost(
6790 uint32_t currentFrameIndex,
6791 uint32_t frameInUseCount,
6792 VmaAllocationRequest* pAllocationRequest) = 0;
6794 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
6796 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
6800 const VmaAllocationRequest& request,
6801 VmaSuballocationType type,
6802 VkDeviceSize allocSize,
6807 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
6810 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
6812 #if VMA_STATS_STRING_ENABLED
6813 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6814 VkDeviceSize unusedBytes,
6815 size_t allocationCount,
6816 size_t unusedRangeCount)
const;
6817 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6818 VkDeviceSize offset,
6820 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6821 VkDeviceSize offset,
6822 VkDeviceSize size)
const;
6823 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
6827 VkDeviceSize m_Size;
6828 const VkAllocationCallbacks* m_pAllocationCallbacks;
6831 #define VMA_VALIDATE(cond) do { if(!(cond)) { \
6832 VMA_ASSERT(0 && "Validation failed: " #cond); \
6836 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
6838 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
6841 virtual ~VmaBlockMetadata_Generic();
6842 virtual void Init(VkDeviceSize size);
6844 virtual bool Validate()
const;
6845 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
6846 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
6847 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6848 virtual bool IsEmpty()
const;
6850 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6851 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6853 #if VMA_STATS_STRING_ENABLED
6854 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
6857 virtual bool CreateAllocationRequest(
6858 uint32_t currentFrameIndex,
6859 uint32_t frameInUseCount,
6860 VkDeviceSize bufferImageGranularity,
6861 VkDeviceSize allocSize,
6862 VkDeviceSize allocAlignment,
6864 VmaSuballocationType allocType,
6865 bool canMakeOtherLost,
6867 VmaAllocationRequest* pAllocationRequest);
6869 virtual bool MakeRequestedAllocationsLost(
6870 uint32_t currentFrameIndex,
6871 uint32_t frameInUseCount,
6872 VmaAllocationRequest* pAllocationRequest);
6874 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6876 virtual VkResult CheckCorruption(
const void* pBlockData);
6879 const VmaAllocationRequest& request,
6880 VmaSuballocationType type,
6881 VkDeviceSize allocSize,
6885 virtual void FreeAtOffset(VkDeviceSize offset);
6890 bool IsBufferImageGranularityConflictPossible(
6891 VkDeviceSize bufferImageGranularity,
6892 VmaSuballocationType& inOutPrevSuballocType)
const;
6895 friend class VmaDefragmentationAlgorithm_Generic;
6896 friend class VmaDefragmentationAlgorithm_Fast;
6898 uint32_t m_FreeCount;
6899 VkDeviceSize m_SumFreeSize;
6900 VmaSuballocationList m_Suballocations;
6903 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
6905 bool ValidateFreeSuballocationList()
const;
6909 bool CheckAllocation(
6910 uint32_t currentFrameIndex,
6911 uint32_t frameInUseCount,
6912 VkDeviceSize bufferImageGranularity,
6913 VkDeviceSize allocSize,
6914 VkDeviceSize allocAlignment,
6915 VmaSuballocationType allocType,
6916 VmaSuballocationList::const_iterator suballocItem,
6917 bool canMakeOtherLost,
6918 VkDeviceSize* pOffset,
6919 size_t* itemsToMakeLostCount,
6920 VkDeviceSize* pSumFreeSize,
6921 VkDeviceSize* pSumItemSize)
const;
6923 void MergeFreeWithNext(VmaSuballocationList::iterator item);
6927 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
6930 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
6933 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// NOTE(review): garbled extraction of vk_mem_alloc.h — leading numeric tokens
// are original line numbers fused into the text, and interior lines are
// missing; this declaration is incomplete and will not compile as written.
// Linear/ring-buffer block metadata: suballocations live in two vectors whose
// roles (1st/2nd) swap via m_1stVectorIndex; the 2nd vector operates in one of
// the SECOND_VECTOR_MODE states (empty, ring buffer, or double stack).
7014 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
7016 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
7019 virtual ~VmaBlockMetadata_Linear();
7020 virtual void Init(VkDeviceSize size);
7022 virtual bool Validate()
const;
7023 virtual size_t GetAllocationCount()
const;
7024 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
7025 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
7026 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
7028 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
7029 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
7031 #if VMA_STATS_STRING_ENABLED
7032 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
7035 virtual bool CreateAllocationRequest(
7036 uint32_t currentFrameIndex,
7037 uint32_t frameInUseCount,
7038 VkDeviceSize bufferImageGranularity,
7039 VkDeviceSize allocSize,
7040 VkDeviceSize allocAlignment,
7042 VmaSuballocationType allocType,
7043 bool canMakeOtherLost,
7045 VmaAllocationRequest* pAllocationRequest);
7047 virtual bool MakeRequestedAllocationsLost(
7048 uint32_t currentFrameIndex,
7049 uint32_t frameInUseCount,
7050 VmaAllocationRequest* pAllocationRequest);
7052 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
7054 virtual VkResult CheckCorruption(
const void* pBlockData);
// NOTE(review): the method name for the following parameter list (presumably
// Alloc) was lost in extraction.
7057 const VmaAllocationRequest& request,
7058 VmaSuballocationType type,
7059 VkDeviceSize allocSize,
7063 virtual void FreeAtOffset(VkDeviceSize offset);
7073 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
7075 enum SECOND_VECTOR_MODE
7077 SECOND_VECTOR_EMPTY,
7082 SECOND_VECTOR_RING_BUFFER,
7088 SECOND_VECTOR_DOUBLE_STACK,
7091 VkDeviceSize m_SumFreeSize;
7092 SuballocationVectorType m_Suballocations0, m_Suballocations1;
7093 uint32_t m_1stVectorIndex;
7094 SECOND_VECTOR_MODE m_2ndVectorMode;
// Accessors resolve which physical vector currently plays the 1st/2nd role.
7096 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7097 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
7098 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7099 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Counters for null (freed) items kept in the vectors pending compaction.
7102 size_t m_1stNullItemsBeginCount;
7104 size_t m_1stNullItemsMiddleCount;
7106 size_t m_2ndNullItemsCount;
7108 bool ShouldCompact1st()
const;
7109 void CleanupAfterFree();
7111 bool CreateAllocationRequest_LowerAddress(
7112 uint32_t currentFrameIndex,
7113 uint32_t frameInUseCount,
7114 VkDeviceSize bufferImageGranularity,
7115 VkDeviceSize allocSize,
7116 VkDeviceSize allocAlignment,
7117 VmaSuballocationType allocType,
7118 bool canMakeOtherLost,
7120 VmaAllocationRequest* pAllocationRequest);
7121 bool CreateAllocationRequest_UpperAddress(
7122 uint32_t currentFrameIndex,
7123 uint32_t frameInUseCount,
7124 VkDeviceSize bufferImageGranularity,
7125 VkDeviceSize allocSize,
7126 VkDeviceSize allocAlignment,
7127 VmaSuballocationType allocType,
7128 bool canMakeOtherLost,
7130 VmaAllocationRequest* pAllocationRequest);
// NOTE(review): garbled extraction of vk_mem_alloc.h — leading numeric tokens
// are original line numbers fused into the text, and interior lines are
// missing; this declaration is incomplete and will not compile as written.
// Buddy-allocator block metadata: a binary tree of nodes rooted at m_Root,
// with per-level free lists (m_FreeList[MAX_LEVELS]); node size halves per
// level (LevelToNodeSize: m_UsableSize >> level).
7144 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
7146 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
7149 virtual ~VmaBlockMetadata_Buddy();
7150 virtual void Init(VkDeviceSize size);
7152 virtual bool Validate()
const;
7153 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
// Reported free size includes the tail the buddy scheme cannot use.
7154 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
7155 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
7156 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
7158 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
7159 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
7161 #if VMA_STATS_STRING_ENABLED
7162 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
7165 virtual bool CreateAllocationRequest(
7166 uint32_t currentFrameIndex,
7167 uint32_t frameInUseCount,
7168 VkDeviceSize bufferImageGranularity,
7169 VkDeviceSize allocSize,
7170 VkDeviceSize allocAlignment,
7172 VmaSuballocationType allocType,
7173 bool canMakeOtherLost,
7175 VmaAllocationRequest* pAllocationRequest);
7177 virtual bool MakeRequestedAllocationsLost(
7178 uint32_t currentFrameIndex,
7179 uint32_t frameInUseCount,
7180 VmaAllocationRequest* pAllocationRequest);
7182 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Corruption detection is not supported by the buddy algorithm.
7184 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
// NOTE(review): the method name for the following parameter list (presumably
// Alloc) was lost in extraction.
7187 const VmaAllocationRequest& request,
7188 VmaSuballocationType type,
7189 VkDeviceSize allocSize,
// Both Free overloads funnel into the private FreeAtOffset(alloc, offset).
7192 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
7193 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
7196 static const VkDeviceSize MIN_NODE_SIZE = 32;
7197 static const size_t MAX_LEVELS = 30;
// Accumulators filled during Validate() and compared against cached counters.
7199 struct ValidationContext
7201 size_t calculatedAllocationCount;
7202 size_t calculatedFreeCount;
7203 VkDeviceSize calculatedSumFreeSize;
7205 ValidationContext() :
7206 calculatedAllocationCount(0),
7207 calculatedFreeCount(0),
7208 calculatedSumFreeSize(0) { }
7213 VkDeviceSize offset;
7243 VkDeviceSize m_UsableSize;
7244 uint32_t m_LevelCount;
7250 } m_FreeList[MAX_LEVELS];
7252 size_t m_AllocationCount;
7256 VkDeviceSize m_SumFreeSize;
7258 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
7259 void DeleteNode(Node* node);
7260 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
7261 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
7262 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
7264 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
7265 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
7269 void AddToFreeListFront(uint32_t level, Node* node);
7273 void RemoveFromFreeList(uint32_t level, Node* node);
7275 #if VMA_STATS_STRING_ENABLED
7276 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// NOTE(review): garbled extraction of vk_mem_alloc.h — leading numeric tokens
// are original line numbers fused into the text, and interior lines are
// missing; this declaration is incomplete and will not compile as written.
// Wraps one VkDeviceMemory allocation: owns a metadata object (m_pMetadata)
// describing suballocations, a map reference count (m_MapCount), and helpers
// to bind buffers/images and to write/validate debug magic values.
7286 class VmaDeviceMemoryBlock
7288 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
7290 VmaBlockMetadata* m_pMetadata;
// Destructor asserts the block is unmapped and its memory already released.
7294 ~VmaDeviceMemoryBlock()
7296 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
7297 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// NOTE(review): the method name owning these parameters (presumably Init)
// was lost in extraction.
7304 uint32_t newMemoryTypeIndex,
7305 VkDeviceMemory newMemory,
7306 VkDeviceSize newSize,
7308 uint32_t algorithm);
7312 VmaPool GetParentPool()
const {
return m_hParentPool; }
7313 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
7314 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7315 uint32_t GetId()
const {
return m_Id; }
7316 void* GetMappedData()
const {
return m_pMappedData; }
7319 bool Validate()
const;
// Map increments the map reference count `count` times; ppData receives the
// mapped pointer.
7324 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
7327 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
7328 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
7330 VkResult BindBufferMemory(
7333 VkDeviceSize allocationLocalOffset,
7336 VkResult BindImageMemory(
7339 VkDeviceSize allocationLocalOffset,
7345 uint32_t m_MemoryTypeIndex;
7347 VkDeviceMemory m_hMemory;
7355 uint32_t m_MapCount;
7356 void* m_pMappedData;
// NOTE(review): garbled extraction fragment (braces and some fields missing).
// Describes one planned defragmentation move: source and destination block
// indices/pointers and the offsets within them.
7359 struct VmaDefragmentationMove
7361 size_t srcBlockIndex;
7362 size_t dstBlockIndex;
7363 VkDeviceSize srcOffset;
7364 VkDeviceSize dstOffset;
7367 VmaDeviceMemoryBlock* pSrcBlock;
7368 VmaDeviceMemoryBlock* pDstBlock;
7371 class VmaDefragmentationAlgorithm;
// NOTE(review): garbled extraction of vk_mem_alloc.h — leading numeric tokens
// are original line numbers fused into the text, and interior lines are
// missing; this declaration is incomplete and will not compile as written.
// A growable sequence of VmaDeviceMemoryBlock for one memory type (m_Blocks,
// guarded by m_Mutex), serving both the default pools and custom VmaPool
// objects, with allocation, defragmentation, and corruption-check entry
// points.
7379 struct VmaBlockVector
7381 VMA_CLASS_NO_COPY(VmaBlockVector)
// NOTE(review): constructor name/first parameters lost in extraction.
7386 uint32_t memoryTypeIndex,
7387 VkDeviceSize preferredBlockSize,
7388 size_t minBlockCount,
7389 size_t maxBlockCount,
7390 VkDeviceSize bufferImageGranularity,
7391 uint32_t frameInUseCount,
7392 bool explicitBlockSize,
7395 VkDeviceSize minAllocationAlignment,
7396 void* pMemoryAllocateNext);
7399 VkResult CreateMinBlocks();
7401 VmaAllocator GetAllocator()
const {
return m_hAllocator; }
7402 VmaPool GetParentPool()
const {
return m_hParentPool; }
// A null parent pool marks this vector as one of the allocator's defaults.
7403 bool IsCustomPool()
const {
return m_hParentPool != VMA_NULL; }
7404 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7405 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
7406 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
7407 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
7408 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
7413 bool IsCorruptionDetectionEnabled()
const;
// NOTE(review): method name for these parameters (presumably Allocate) lost
// in extraction.
7416 uint32_t currentFrameIndex,
7418 VkDeviceSize alignment,
7420 VmaSuballocationType suballocType,
7421 size_t allocationCount,
7429 #if VMA_STATS_STRING_ENABLED
7430 void PrintDetailedMap(
class VmaJsonWriter& json);
7433 void MakePoolAllocationsLost(
7434 uint32_t currentFrameIndex,
7435 size_t* pLostAllocationCount);
7436 VkResult CheckCorruption();
// NOTE(review): method name for this defragmentation entry point lost in
// extraction; byte/allocation budgets are passed by reference and updated.
7440 class VmaBlockVectorDefragmentationContext* pCtx,
7442 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
7443 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
7444 VkCommandBuffer commandBuffer);
7445 void DefragmentationEnd(
7446 class VmaBlockVectorDefragmentationContext* pCtx,
7450 uint32_t ProcessDefragmentations(
7451 class VmaBlockVectorDefragmentationContext *pCtx,
7454 void CommitDefragmentations(
7455 class VmaBlockVectorDefragmentationContext *pCtx,
7461 size_t GetBlockCount()
const {
return m_Blocks.size(); }
7462 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
7463 size_t CalcAllocationCount()
const;
7464 bool IsBufferImageGranularityConflictPossible()
const;
7467 friend class VmaDefragmentationAlgorithm_Generic;
// Configuration captured at construction; immutable thereafter.
7471 const uint32_t m_MemoryTypeIndex;
7472 const VkDeviceSize m_PreferredBlockSize;
7473 const size_t m_MinBlockCount;
7474 const size_t m_MaxBlockCount;
7475 const VkDeviceSize m_BufferImageGranularity;
7476 const uint32_t m_FrameInUseCount;
7477 const bool m_ExplicitBlockSize;
7478 const uint32_t m_Algorithm;
7479 const float m_Priority;
7480 const VkDeviceSize m_MinAllocationAlignment;
7481 void*
const m_pMemoryAllocateNext;
7482 VMA_RW_MUTEX m_Mutex;
7486 bool m_HasEmptyBlock;
7488 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
7489 uint32_t m_NextBlockId;
7491 VkDeviceSize CalcMaxBlockSize()
const;
7494 void Remove(VmaDeviceMemoryBlock* pBlock);
7498 void IncrementallySortBlocks();
7500 VkResult AllocatePage(
7501 uint32_t currentFrameIndex,
7503 VkDeviceSize alignment,
7505 VmaSuballocationType suballocType,
7509 VkResult AllocateFromBlock(
7510 VmaDeviceMemoryBlock* pBlock,
7511 uint32_t currentFrameIndex,
7513 VkDeviceSize alignment,
7516 VmaSuballocationType suballocType,
7520 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
7523 void ApplyDefragmentationMovesCpu(
7524 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7525 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
7527 void ApplyDefragmentationMovesGpu(
7528 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7529 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7530 VkCommandBuffer commandBuffer);
7538 void UpdateHasEmptyBlock();
// NOTE(review): fragment of the VmaPool_T declaration — the `class VmaPool_T`
// header line itself was dropped by the extraction. A custom pool owns a
// VmaBlockVector, carries an id and an optional name, and links into an
// intrusive pool list via m_PrevPool/m_NextPool.
7543 VMA_CLASS_NO_COPY(VmaPool_T)
7545 VmaBlockVector m_BlockVector;
7550 VkDeviceSize preferredBlockSize);
7553 uint32_t GetId()
const {
return m_Id; }
// Id may be assigned exactly once (asserted to be previously 0).
7554 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
7556 const char* GetName()
const {
return m_Name; }
7557 void SetName(
const char* pName);
7559 #if VMA_STATS_STRING_ENABLED
// Intrusive doubly-linked list hooks, traversed via VmaPoolListItemTraits.
7566 VmaPool_T* m_PrevPool = VMA_NULL;
7567 VmaPool_T* m_NextPool = VMA_NULL;
7568 friend struct VmaPoolListItemTraits;
// NOTE(review): garbled extraction fragment (braces missing).
// Traits adapter exposing VmaPool_T's intrusive prev/next pointers to the
// generic intrusive linked-list implementation.
7571 struct VmaPoolListItemTraits
7573 typedef VmaPool_T ItemType;
7574 static ItemType* GetPrev(
const ItemType* item) {
return item->m_PrevPool; }
7575 static ItemType* GetNext(
const ItemType* item) {
return item->m_NextPool; }
7576 static ItemType*& AccessPrev(ItemType* item) {
return item->m_PrevPool; }
7577 static ItemType*& AccessNext(ItemType* item) {
return item->m_NextPool; }
// NOTE(review): garbled extraction of vk_mem_alloc.h — leading numeric tokens
// are original line numbers fused into the text, and interior lines are
// missing; this declaration is incomplete and will not compile as written.
// Abstract base for defragmentation strategies over one VmaBlockVector:
// collect candidate allocations (AddAllocation/AddAll), then produce a list
// of moves (Defragment) and report bytes/allocations moved.
7587 class VmaDefragmentationAlgorithm
7589 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
7591 VmaDefragmentationAlgorithm(
7593 VmaBlockVector* pBlockVector,
7594 uint32_t currentFrameIndex) :
7595 m_hAllocator(hAllocator),
7596 m_pBlockVector(pBlockVector),
7597 m_CurrentFrameIndex(currentFrameIndex)
7600 virtual ~VmaDefragmentationAlgorithm()
7604 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
7605 virtual void AddAll() = 0;
7607 virtual VkResult Defragment(
7608 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7609 VkDeviceSize maxBytesToMove,
7610 uint32_t maxAllocationsToMove,
7613 virtual VkDeviceSize GetBytesMoved()
const = 0;
7614 virtual uint32_t GetAllocationsMoved()
const = 0;
7618 VmaBlockVector*
const m_pBlockVector;
7619 const uint32_t m_CurrentFrameIndex;
// Pairs an allocation with the caller's "changed" output flag.
7621 struct AllocationInfo
7624 VkBool32* m_pChanged;
7627 m_hAllocation(VK_NULL_HANDLE),
7628 m_pChanged(VMA_NULL)
7632 m_hAllocation(hAlloc),
7633 m_pChanged(pChanged)
// NOTE(review): garbled extraction of vk_mem_alloc.h — leading numeric tokens
// are original line numbers fused into the text, and interior lines are
// missing; this declaration is incomplete and will not compile as written.
// General-purpose defragmentation: tracks per-block allocation lists
// (BlockInfo), sorts allocations by size/offset, and iterates DefragmentRound
// under byte/allocation budgets.
7639 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
7641 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
7643 VmaDefragmentationAlgorithm_Generic(
7645 VmaBlockVector* pBlockVector,
7646 uint32_t currentFrameIndex,
7647 bool overlappingMoveSupported);
7648 virtual ~VmaDefragmentationAlgorithm_Generic();
7650 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7651 virtual void AddAll() { m_AllAllocations =
true; }
7653 virtual VkResult Defragment(
7654 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7655 VkDeviceSize maxBytesToMove,
7656 uint32_t maxAllocationsToMove,
7659 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7660 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7663 uint32_t m_AllocationCount;
7664 bool m_AllAllocations;
7666 VkDeviceSize m_BytesMoved;
7667 uint32_t m_AllocationsMoved;
// Comparator: larger allocations first.
7669 struct AllocationInfoSizeGreater
7671 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7673 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
// Comparator: higher offsets first.
7677 struct AllocationInfoOffsetGreater
7679 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7681 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
// Per-block bookkeeping for the defragmentation pass.
7687 size_t m_OriginalBlockIndex;
7688 VmaDeviceMemoryBlock* m_pBlock;
7689 bool m_HasNonMovableAllocations;
7690 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
7692 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
7693 m_OriginalBlockIndex(SIZE_MAX),
7695 m_HasNonMovableAllocations(true),
7696 m_Allocations(pAllocationCallbacks)
// A block has non-movable allocations when it holds more allocations than
// were registered for defragmentation.
7700 void CalcHasNonMovableAllocations()
7702 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
7703 const size_t defragmentAllocCount = m_Allocations.size();
7704 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
7707 void SortAllocationsBySizeDescending()
7709 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
7712 void SortAllocationsByOffsetDescending()
7714 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
// Orders BlockInfo pointers by underlying block address (for binary search).
7718 struct BlockPointerLess
7720 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const
7722 return pLhsBlockInfo->m_pBlock < pRhsBlock;
7724 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7726 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Ranks blocks as move destinations: blocks with non-movable allocations
// first, then by smaller free size.
7732 struct BlockInfoCompareMoveDestination
7734 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7736 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
7740 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
7744 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
7752 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
7753 BlockInfoVector m_Blocks;
7755 VkResult DefragmentRound(
7756 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7757 VkDeviceSize maxBytesToMove,
7758 uint32_t maxAllocationsToMove,
7759 bool freeOldAllocations);
7761 size_t CalcBlocksWithNonMovableCount()
const;
7763 static bool MoveMakesSense(
7764 size_t dstBlockIndex, VkDeviceSize dstOffset,
7765 size_t srcBlockIndex, VkDeviceSize srcOffset);
// NOTE(review): garbled extraction of vk_mem_alloc.h — leading numeric tokens
// are original line numbers fused into the text, and interior lines are
// missing; this declaration is incomplete and will not compile as written.
// Fast defragmentation: compacts whole blocks, tracking a small fixed table
// of reusable free regions (FreeSpaceDatabase, MAX_COUNT entries) instead of
// per-allocation bookkeeping.
7768 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
7770 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
7772 VmaDefragmentationAlgorithm_Fast(
7774 VmaBlockVector* pBlockVector,
7775 uint32_t currentFrameIndex,
7776 bool overlappingMoveSupported);
7777 virtual ~VmaDefragmentationAlgorithm_Fast();
// Individual allocations are only counted; this algorithm operates per block.
7779 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
7780 virtual void AddAll() { m_AllAllocations =
true; }
7782 virtual VkResult Defragment(
7783 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7784 VkDeviceSize maxBytesToMove,
7785 uint32_t maxAllocationsToMove,
7788 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7789 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7794 size_t origBlockIndex;
// Fixed-capacity table of free regions; SIZE_MAX blockInfoIndex marks an
// empty slot.
7797 class FreeSpaceDatabase
7803 s.blockInfoIndex = SIZE_MAX;
7804 for(
size_t i = 0; i < MAX_COUNT; ++i)
7806 m_FreeSpaces[i] = s;
// Records a free region, preferring to replace the smallest tracked region;
// regions below the registration threshold are ignored.
7810 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
7812 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7818 size_t bestIndex = SIZE_MAX;
7819 for(
size_t i = 0; i < MAX_COUNT; ++i)
7822 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
7827 if(m_FreeSpaces[i].size < size &&
7828 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
7834 if(bestIndex != SIZE_MAX)
7836 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
7837 m_FreeSpaces[bestIndex].offset = offset;
7838 m_FreeSpaces[bestIndex].size = size;
// Finds a tracked region fitting (alignment, size); on success returns the
// block index and aligned destination offset, shrinking or retiring the
// region depending on the space left after the allocation.
7842 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
7843 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
7845 size_t bestIndex = SIZE_MAX;
7846 VkDeviceSize bestFreeSpaceAfter = 0;
7847 for(
size_t i = 0; i < MAX_COUNT; ++i)
7850 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
7852 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
7854 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
7856 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
7858 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
7861 bestFreeSpaceAfter = freeSpaceAfter;
7867 if(bestIndex != SIZE_MAX)
7869 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
7870 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
7872 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7875 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
7876 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
7877 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
7882 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
7892 static const size_t MAX_COUNT = 4;
7896 size_t blockInfoIndex;
7897 VkDeviceSize offset;
7899 } m_FreeSpaces[MAX_COUNT];
7902 const bool m_OverlappingMoveSupported;
7904 uint32_t m_AllocationCount;
7905 bool m_AllAllocations;
7907 VkDeviceSize m_BytesMoved;
7908 uint32_t m_AllocationsMoved;
7910 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
7912 void PreprocessMetadata();
7913 void PostprocessMetadata();
7914 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
// NOTE(review): garbled extraction fragments of two related declarations;
// braces and many members are missing.
// Per-block flags used while a defragmentation pass is in flight.
7917 struct VmaBlockDefragmentationContext
7921 BLOCK_FLAG_USED = 0x00000001,
// Per-block-vector defragmentation state: block contexts, the planned move
// list, progress counters, and the chosen algorithm instance.
7927 class VmaBlockVectorDefragmentationContext
7929 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
7933 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
7934 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
7935 uint32_t defragmentationMovesProcessed;
7936 uint32_t defragmentationMovesCommitted;
7937 bool hasDefragmentationPlan;
7939 VmaBlockVectorDefragmentationContext(
7942 VmaBlockVector* pBlockVector,
7943 uint32_t currFrameIndex);
7944 ~VmaBlockVectorDefragmentationContext();
7946 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
7947 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
7948 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
7950 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7951 void AddAll() { m_AllAllocations =
true; }
7960 VmaBlockVector*
const m_pBlockVector;
7961 const uint32_t m_CurrFrameIndex;
7963 VmaDefragmentationAlgorithm* m_pAlgorithm;
7971 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
7972 bool m_AllAllocations;
// NOTE(review): garbled extraction fragment; braces and members are missing.
// Top-level defragmentation context: gathers pools/allocations to process and
// holds one VmaBlockVectorDefragmentationContext per default memory type plus
// a vector of contexts for custom pools.
7975 struct VmaDefragmentationContext_T
7978 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
7980 VmaDefragmentationContext_T(
7982 uint32_t currFrameIndex,
7985 ~VmaDefragmentationContext_T();
7987 void AddPools(uint32_t poolCount,
const VmaPool* pPools);
7988 void AddAllocations(
7989 uint32_t allocationCount,
7991 VkBool32* pAllocationsChanged);
7999 VkResult Defragment(
8000 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
8001 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
8005 VkResult DefragmentPassEnd();
8009 const uint32_t m_CurrFrameIndex;
8010 const uint32_t m_Flags;
// Separate CPU and GPU move budgets.
8013 VkDeviceSize m_MaxCpuBytesToMove;
8014 uint32_t m_MaxCpuAllocationsToMove;
8015 VkDeviceSize m_MaxGpuBytesToMove;
8016 uint32_t m_MaxGpuAllocationsToMove;
8019 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
8021 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
// NOTE(review): garbled extraction fragment — the class header (presumably
// VmaRecorder, compiled only under VMA_RECORDING_ENABLED) and many lines were
// dropped. This is the call-recording facility: each RecordXxx method logs
// one allocator API call with a frame index to a file (m_File/m_FileMutex).
8024 #if VMA_RECORDING_ENABLED
8031 void WriteConfiguration(
8032 const VkPhysicalDeviceProperties& devProps,
8033 const VkPhysicalDeviceMemoryProperties& memProps,
8034 uint32_t vulkanApiVersion,
8035 bool dedicatedAllocationExtensionEnabled,
8036 bool bindMemory2ExtensionEnabled,
8037 bool memoryBudgetExtensionEnabled,
8038 bool deviceCoherentMemoryExtensionEnabled);
8041 void RecordCreateAllocator(uint32_t frameIndex);
8042 void RecordDestroyAllocator(uint32_t frameIndex);
8043 void RecordCreatePool(uint32_t frameIndex,
8046 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
8047 void RecordAllocateMemory(uint32_t frameIndex,
8048 const VkMemoryRequirements& vkMemReq,
8051 void RecordAllocateMemoryPages(uint32_t frameIndex,
8052 const VkMemoryRequirements& vkMemReq,
8054 uint64_t allocationCount,
8056 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
8057 const VkMemoryRequirements& vkMemReq,
8058 bool requiresDedicatedAllocation,
8059 bool prefersDedicatedAllocation,
8062 void RecordAllocateMemoryForImage(uint32_t frameIndex,
8063 const VkMemoryRequirements& vkMemReq,
8064 bool requiresDedicatedAllocation,
8065 bool prefersDedicatedAllocation,
8068 void RecordFreeMemory(uint32_t frameIndex,
8070 void RecordFreeMemoryPages(uint32_t frameIndex,
8071 uint64_t allocationCount,
8073 void RecordSetAllocationUserData(uint32_t frameIndex,
8075 const void* pUserData);
8076 void RecordCreateLostAllocation(uint32_t frameIndex,
8078 void RecordMapMemory(uint32_t frameIndex,
8080 void RecordUnmapMemory(uint32_t frameIndex,
8082 void RecordFlushAllocation(uint32_t frameIndex,
8083 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
8084 void RecordInvalidateAllocation(uint32_t frameIndex,
8085 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
8086 void RecordCreateBuffer(uint32_t frameIndex,
8087 const VkBufferCreateInfo& bufCreateInfo,
8090 void RecordCreateImage(uint32_t frameIndex,
8091 const VkImageCreateInfo& imageCreateInfo,
8094 void RecordDestroyBuffer(uint32_t frameIndex,
8096 void RecordDestroyImage(uint32_t frameIndex,
8098 void RecordTouchAllocation(uint32_t frameIndex,
8100 void RecordGetAllocationInfo(uint32_t frameIndex,
8102 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
8104 void RecordDefragmentationBegin(uint32_t frameIndex,
8107 void RecordDefragmentationEnd(uint32_t frameIndex,
8109 void RecordSetPoolName(uint32_t frameIndex,
// Helper holding a printable form of user data.
8120 class UserDataString
8124 const char* GetString()
const {
return m_Str; }
8134 VMA_MUTEX m_FileMutex;
8135 std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;
8137 void GetBasicParams(CallParams& outParams);
// Writes `count` pointers space-separated to the recording file.
8140 template<
typename T>
8141 void PrintPointerList(uint64_t count,
const T* pItems)
8145 fprintf(m_File,
"%p", pItems[0]);
8146 for(uint64_t i = 1; i < count; ++i)
8148 fprintf(m_File,
" %p", pItems[i]);
8153 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
// NOTE(review): garbled extraction fragment (braces and members missing).
// Thread-safe-looking wrapper around a pool allocator for VmaAllocation_T
// objects; Allocate forwards constructor arguments variadically.
8162 class VmaAllocationObjectAllocator
8164 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
8166 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
8168 template<
typename... Types>
VmaAllocation Allocate(Types... args);
8173 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
// NOTE(review): garbled extraction fragment (braces and some lines missing).
// Per-heap budget accounting: atomic block/allocation byte counters, plus —
// under VMA_MEMORY_BUDGET — cached usage/budget figures fetched from the
// driver and an operation counter that triggers refetching.
8176 struct VmaCurrentBudgetData
8178 VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
8179 VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
8181 #if VMA_MEMORY_BUDGET
8182 VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
8183 VMA_RW_MUTEX m_BudgetMutex;
8184 uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
8185 uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
8186 uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
// Zero-initializes all per-heap counters.
8189 VmaCurrentBudgetData()
8191 for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
8193 m_BlockBytes[heapIndex] = 0;
8194 m_AllocationBytes[heapIndex] = 0;
8195 #if VMA_MEMORY_BUDGET
8196 m_VulkanUsage[heapIndex] = 0;
8197 m_VulkanBudget[heapIndex] = 0;
8198 m_BlockBytesAtBudgetFetch[heapIndex] = 0;
8202 #if VMA_MEMORY_BUDGET
8203 m_OperationsSinceBudgetFetch = 0;
// Add/Remove adjust the allocation byte counter and bump the since-fetch
// operation count when budget support is compiled in.
8207 void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
8209 m_AllocationBytes[heapIndex] += allocationSize;
8210 #if VMA_MEMORY_BUDGET
8211 ++m_OperationsSinceBudgetFetch;
8215 void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
8217 VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
8218 m_AllocationBytes[heapIndex] -= allocationSize;
8219 #if VMA_MEMORY_BUDGET
8220 ++m_OperationsSinceBudgetFetch;
8226 struct VmaAllocator_T
8228 VMA_CLASS_NO_COPY(VmaAllocator_T)
8231 uint32_t m_VulkanApiVersion;
8232 bool m_UseKhrDedicatedAllocation;
8233 bool m_UseKhrBindMemory2;
8234 bool m_UseExtMemoryBudget;
8235 bool m_UseAmdDeviceCoherentMemory;
8236 bool m_UseKhrBufferDeviceAddress;
8237 bool m_UseExtMemoryPriority;
8239 VkInstance m_hInstance;
8240 bool m_AllocationCallbacksSpecified;
8241 VkAllocationCallbacks m_AllocationCallbacks;
8243 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
8246 uint32_t m_HeapSizeLimitMask;
8248 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
8249 VkPhysicalDeviceMemoryProperties m_MemProps;
8252 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
8254 typedef VmaIntrusiveLinkedList<VmaDedicatedAllocationListItemTraits> DedicatedAllocationLinkedList;
8255 DedicatedAllocationLinkedList m_DedicatedAllocations[VK_MAX_MEMORY_TYPES];
8256 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
8258 VmaCurrentBudgetData m_Budget;
8259 VMA_ATOMIC_UINT32 m_DeviceMemoryCount;
8265 const VkAllocationCallbacks* GetAllocationCallbacks()
const
8267 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
8271 return m_VulkanFunctions;
8274 VkPhysicalDevice GetPhysicalDevice()
const {
return m_PhysicalDevice; }
8276 VkDeviceSize GetBufferImageGranularity()
const
8279 static_cast<VkDeviceSize
>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
8280 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
8283 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
8284 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
8286 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const
8288 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
8289 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
8292 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const
8294 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
8295 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8298 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const
8300 return IsMemoryTypeNonCoherent(memTypeIndex) ?
8301 VMA_MAX((VkDeviceSize)VMA_MIN_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
8302 (VkDeviceSize)VMA_MIN_ALIGNMENT;
8305 bool IsIntegratedGpu()
const
8307 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
8310 uint32_t GetGlobalMemoryTypeBits()
const {
return m_GlobalMemoryTypeBits; }
8312 #if VMA_RECORDING_ENABLED
8313 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
// NOTE(review): member-function declarations and data members of VmaAllocator_T.
// Many declaration lines (return/parameter fragments, access specifiers, #endif
// lines) were lost in extraction; code left byte-identical, comments only.
// Query memory requirements and dedicated-allocation preference for a buffer/image.
8316 void GetBufferMemoryRequirements(
8318 VkMemoryRequirements& memReq,
8319 bool& requiresDedicatedAllocation,
8320 bool& prefersDedicatedAllocation)
const;
8321 void GetImageMemoryRequirements(
8323 VkMemoryRequirements& memReq,
8324 bool& requiresDedicatedAllocation,
8325 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point; may allocate several allocations at once.
8328 VkResult AllocateMemory(
8329 const VkMemoryRequirements& vkMemReq,
8330 bool requiresDedicatedAllocation,
8331 bool prefersDedicatedAllocation,
8332 VkBuffer dedicatedBuffer,
8333 VkBufferUsageFlags dedicatedBufferUsage,
8334 VkImage dedicatedImage,
8336 VmaSuballocationType suballocType,
8337 size_t allocationCount,
8342 size_t allocationCount,
8345 void CalculateStats(
VmaStats* pStats);
8348 VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);
8350 #if VMA_STATS_STRING_ENABLED
8351 void PrintDetailedMap(
class VmaJsonWriter& json);
// Defragmentation lifecycle (begin/end and per-pass variants).
8354 VkResult DefragmentationBegin(
8358 VkResult DefragmentationEnd(
8361 VkResult DefragmentationPassBegin(
8364 VkResult DefragmentationPassEnd(
8371 void DestroyPool(
VmaPool pool);
8374 void SetCurrentFrameIndex(uint32_t frameIndex);
8375 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
8377 void MakePoolAllocationsLost(
8379 size_t* pLostAllocationCount);
8380 VkResult CheckPoolCorruption(
VmaPool hPool);
8381 VkResult CheckCorruption(uint32_t memoryTypeBits);
// Low-level vkAllocateMemory/vkFreeMemory wrappers (budget/callback aware).
8386 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
8388 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
8390 VkResult BindVulkanBuffer(
8391 VkDeviceMemory memory,
8392 VkDeviceSize memoryOffset,
8396 VkResult BindVulkanImage(
8397 VkDeviceMemory memory,
8398 VkDeviceSize memoryOffset,
8405 VkResult BindBufferMemory(
8407 VkDeviceSize allocationLocalOffset,
8410 VkResult BindImageMemory(
8412 VkDeviceSize allocationLocalOffset,
// Flush or invalidate mapped ranges of one / many allocations (op selects which).
8416 VkResult FlushOrInvalidateAllocation(
8418 VkDeviceSize offset, VkDeviceSize size,
8419 VMA_CACHE_OPERATION op);
8420 VkResult FlushOrInvalidateAllocations(
8421 uint32_t allocationCount,
8423 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
8424 VMA_CACHE_OPERATION op);
8426 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
8432 uint32_t GetGpuDefragmentationMemoryTypeBits();
8434 #if VMA_EXTERNAL_MEMORY
8435 VkExternalMemoryHandleTypeFlagsKHR GetExternalMemoryHandleTypeFlags(uint32_t memTypeIndex)
const
8437 return m_TypeExternalMemoryHandleTypes[memTypeIndex];
// ---- data members ----
8442 VkDeviceSize m_PreferredLargeHeapBlockSize;
8444 VkPhysicalDevice m_PhysicalDevice;
8445 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
8446 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
8447 #if VMA_EXTERNAL_MEMORY
8448 VkExternalMemoryHandleTypeFlagsKHR m_TypeExternalMemoryHandleTypes[VK_MAX_MEMORY_TYPES];
8451 VMA_RW_MUTEX m_PoolsMutex;
8452 typedef VmaIntrusiveLinkedList<VmaPoolListItemTraits> PoolList;
8455 uint32_t m_NextPoolId;
8460 uint32_t m_GlobalMemoryTypeBits;
8462 #if VMA_RECORDING_ENABLED
8463 VmaRecorder* m_pRecorder;
// ---- private helpers ----
8468 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
8469 void ImportVulkanFunctions_Static();
8474 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
8475 void ImportVulkanFunctions_Dynamic();
8478 void ValidateVulkanFunctions();
8480 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
8482 VkResult AllocateMemoryOfType(
8484 VkDeviceSize alignment,
8485 bool dedicatedAllocation,
8486 VkBuffer dedicatedBuffer,
8487 VkBufferUsageFlags dedicatedBufferUsage,
8488 VkImage dedicatedImage,
8490 uint32_t memTypeIndex,
8491 VmaSuballocationType suballocType,
8492 size_t allocationCount,
8496 VkResult AllocateDedicatedMemoryPage(
8498 VmaSuballocationType suballocType,
8499 uint32_t memTypeIndex,
8500 const VkMemoryAllocateInfo& allocInfo,
8502 bool isUserDataString,
8507 VkResult AllocateDedicatedMemory(
8509 VmaSuballocationType suballocType,
8510 uint32_t memTypeIndex,
8513 bool isUserDataString,
8516 VkBuffer dedicatedBuffer,
8517 VkBufferUsageFlags dedicatedBufferUsage,
8518 VkImage dedicatedImage,
8519 size_t allocationCount,
8528 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
8530 uint32_t CalculateGlobalMemoryTypeBits()
const;
8532 bool GetFlushOrInvalidateRange(
8534 VkDeviceSize offset, VkDeviceSize size,
8535 VkMappedMemoryRange& outRange)
const;
8537 #if VMA_MEMORY_BUDGET
8538 void UpdateVulkanBudget();
8545 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
8547 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
8550 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
8552 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// NOTE(review): the function signature line is missing from this extraction;
// only the template header and return expression survive. Presumably this is
// `vma_new<T>(hAllocator)` — single-object typed allocation — TODO confirm
// against the upstream file. Code left byte-identical.
8555 template<
typename T>
8558 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
8561 template<
typename T>
8562 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
8564 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// NOTE(review): typed deallocation helpers. Interior lines are missing from this
// extraction — in particular the explicit destructor call(s) (`ptr->~T()` /
// `ptr[i].~T()`) that presumably sit between these lines — TODO confirm upstream.
// Code left byte-identical, comments only.
8567 template<
typename T>
8568 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
8573 VmaFree(hAllocator, ptr);
8577 template<
typename T>
8578 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
// Destroys elements in reverse order, then frees the array storage.
8582 for(
size_t i = count; i--; )
8584 VmaFree(hAllocator, ptr);
// NOTE(review): VmaStringBuilder — a minimal growable char buffer used to build
// the stats string (only compiled when VMA_STATS_STRING_ENABLED). Lines are
// missing from this extraction (access specifiers, braces, some method bodies);
// code left byte-identical, comments only.
8591 #if VMA_STATS_STRING_ENABLED
8593 class VmaStringBuilder
8596 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
8597 size_t GetLength()
const {
return m_Data.size(); }
// Returned pointer is NOT NUL-terminated — length comes from GetLength().
8598 const char* GetData()
const {
return m_Data.data(); }
8600 void Add(
char ch) { m_Data.push_back(ch); }
8601 void Add(
const char* pStr);
8602 void AddNewLine() { Add(
'\n'); }
8603 void AddNumber(uint32_t num);
8604 void AddNumber(uint64_t num);
8605 void AddPointer(
const void* ptr);
8608 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a C string by growing the buffer and memcpy-ing the bytes in.
8611 void VmaStringBuilder::Add(
const char* pStr)
8613 const size_t strLen = strlen(pStr);
8616 const size_t oldCount = m_Data.size();
8617 m_Data.resize(oldCount + strLen);
8618 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Decimal formatting: digits are produced least-significant-first into a local
// buffer (the surrounding loop/buffer lines are elided in this extraction).
8622 void VmaStringBuilder::AddNumber(uint32_t num)
8629 *--p =
'0' + (num % 10);
8636 void VmaStringBuilder::AddNumber(uint64_t num)
8643 *--p =
'0' + (num % 10);
8650 void VmaStringBuilder::AddPointer(
const void* ptr)
8653 VmaPtrToStr(buf,
sizeof(buf), ptr);
// NOTE(review): VmaJsonWriter — streaming JSON emitter over a VmaStringBuilder.
// Objects/arrays are tracked on an explicit stack; inside an object, values
// alternate key/value via valueCount parity. Extraction dropped several lines
// (class header, access specifiers, StackItem struct header, m_SB initializer);
// code left byte-identical, comments only.
8662 #if VMA_STATS_STRING_ENABLED
8666 VMA_CLASS_NO_COPY(VmaJsonWriter)
8668 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
8671 void BeginObject(
bool singleLine =
false);
8674 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString in one call.
8677 void WriteString(
const char* pStr);
8678 void BeginString(
const char* pStr = VMA_NULL);
8679 void ContinueString(
const char* pStr);
8680 void ContinueString(uint32_t n);
8681 void ContinueString(uint64_t n);
8682 void ContinueString_Pointer(
const void* ptr);
8683 void EndString(
const char* pStr = VMA_NULL);
8685 void WriteNumber(uint32_t n);
8686 void WriteNumber(uint64_t n);
8687 void WriteBool(
bool b);
8691 static const char*
const INDENT;
8693 enum COLLECTION_TYPE
8695 COLLECTION_TYPE_OBJECT,
8696 COLLECTION_TYPE_ARRAY,
// StackItem: one entry per currently-open object/array.
8700 COLLECTION_TYPE type;
8701 uint32_t valueCount;
8702 bool singleLineMode;
8705 VmaStringBuilder& m_SB;
8706 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
8707 bool m_InsideString;
8709 void BeginValue(
bool isString);
8710 void WriteIndent(
bool oneLess =
false);
// One indentation level rendered as two spaces.
8713 const char*
const VmaJsonWriter::INDENT =
" ";
// Constructor: binds the output builder; stack uses the provided callbacks.
// (The m_SB(sb) initializer line is elided in this extraction.)
8715 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
8717 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
8718 m_InsideString(false)
8722 VmaJsonWriter::~VmaJsonWriter()
8724 VMA_ASSERT(!m_InsideString);
8725 VMA_ASSERT(m_Stack.empty());
// NOTE(review): VmaJsonWriter member definitions. Bodies are heavily elided by
// the extraction (e.g. the BeginValue/m_SB.Add calls that open each collection,
// the escape-switch in ContinueString, the pop in EndObject/EndArray). Code left
// byte-identical, comments only.
// Opens "{"; singleLine suppresses newlines/indentation inside it.
8728 void VmaJsonWriter::BeginObject(
bool singleLine)
8730 VMA_ASSERT(!m_InsideString);
8736 item.type = COLLECTION_TYPE_OBJECT;
8737 item.valueCount = 0;
8738 item.singleLineMode = singleLine;
8739 m_Stack.push_back(item);
8742 void VmaJsonWriter::EndObject()
8744 VMA_ASSERT(!m_InsideString);
// Sanity: the collection being closed must be the innermost one and an object.
8749 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
8753 void VmaJsonWriter::BeginArray(
bool singleLine)
8755 VMA_ASSERT(!m_InsideString);
8761 item.type = COLLECTION_TYPE_ARRAY;
8762 item.valueCount = 0;
8763 item.singleLineMode = singleLine;
8764 m_Stack.push_back(item);
8767 void VmaJsonWriter::EndArray()
8769 VMA_ASSERT(!m_InsideString);
8774 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
8778 void VmaJsonWriter::WriteString(
const char* pStr)
// BeginString opens the quote; optional pStr is emitted immediately.
8784 void VmaJsonWriter::BeginString(
const char* pStr)
8786 VMA_ASSERT(!m_InsideString);
8790 m_InsideString =
true;
8791 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8793 ContinueString(pStr);
// Appends raw characters inside an open string, escaping per JSON
// (the per-character escape switch is elided in this extraction).
8797 void VmaJsonWriter::ContinueString(
const char* pStr)
8799 VMA_ASSERT(m_InsideString);
8801 const size_t strLen = strlen(pStr);
8802 for(
size_t i = 0; i < strLen; ++i)
8835 VMA_ASSERT(0 &&
"Character not currently supported.");
8841 void VmaJsonWriter::ContinueString(uint32_t n)
8843 VMA_ASSERT(m_InsideString);
8847 void VmaJsonWriter::ContinueString(uint64_t n)
8849 VMA_ASSERT(m_InsideString);
8853 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
8855 VMA_ASSERT(m_InsideString);
8856 m_SB.AddPointer(ptr);
// Closes the quote; optional pStr is appended first.
8859 void VmaJsonWriter::EndString(
const char* pStr)
8861 VMA_ASSERT(m_InsideString);
8862 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8864 ContinueString(pStr);
8867 m_InsideString =
false;
8870 void VmaJsonWriter::WriteNumber(uint32_t n)
8872 VMA_ASSERT(!m_InsideString);
8877 void VmaJsonWriter::WriteNumber(uint64_t n)
8879 VMA_ASSERT(!m_InsideString);
8884 void VmaJsonWriter::WriteBool(
bool b)
8886 VMA_ASSERT(!m_InsideString);
8888 m_SB.Add(b ?
"true" :
"false");
8891 void VmaJsonWriter::WriteNull()
8893 VMA_ASSERT(!m_InsideString);
// Emits the separator (":" or ",") that must precede the next value; inside an
// object, even valueCount means a key is expected, so isString must hold.
8898 void VmaJsonWriter::BeginValue(
bool isString)
8900 if(!m_Stack.empty())
8902 StackItem& currItem = m_Stack.back();
8903 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8904 currItem.valueCount % 2 == 0)
8906 VMA_ASSERT(isString);
8909 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8910 currItem.valueCount % 2 != 0)
8914 else if(currItem.valueCount > 0)
8923 ++currItem.valueCount;
// Newline + one INDENT per open (non-single-line) collection; oneLess is used
// when writing a closing bracket.
8927 void VmaJsonWriter::WriteIndent(
bool oneLess)
8929 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
8933 size_t count = m_Stack.size();
8934 if(count > 0 && oneLess)
8938 for(
size_t i = 0; i < count; ++i)
// NOTE(review): extraction dropped interior lines/braces; code left byte-identical.
// SetUserData: when the allocation was created with the "user data is a string"
// flag, the old string copy is freed and pUserData (treated as const char*) is
// deep-copied; otherwise the raw pointer is stored as-is.
8949 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
8951 if(IsUserDataString())
8953 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
8955 FreeUserDataString(hAllocator);
8957 if(pUserData != VMA_NULL)
8959 m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (
const char*)pUserData);
8964 m_pUserData = pUserData;
// ChangeBlockAllocation: retargets a block allocation to another block/offset
// (used by defragmentation). If the allocation is persistently mapped, the map
// reference count is migrated: old block unmapped, new block mapped.
8968 void VmaAllocation_T::ChangeBlockAllocation(
8970 VmaDeviceMemoryBlock* block,
8971 VkDeviceSize offset)
8973 VMA_ASSERT(block != VMA_NULL);
8974 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8977 if(block != m_BlockAllocation.m_Block)
8979 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
8980 if(IsPersistentMap())
8982 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
8983 block->Map(hAllocator, mapRefCount, VMA_NULL);
8986 m_BlockAllocation.m_Block = block;
8987 m_BlockAllocation.m_Offset = offset;
8990 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
8992 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8993 m_BlockAllocation.m_Offset = newOffset;
// NOTE(review): switch headers, braces and default branches are elided by the
// extraction; code left byte-identical, comments only.
// GetOffset: offset within the owning block for block allocations; dedicated
// allocations presumably return 0 (that branch is elided here) — TODO confirm.
8996 VkDeviceSize VmaAllocation_T::GetOffset()
const
9000 case ALLOCATION_TYPE_BLOCK:
9001 return m_BlockAllocation.m_Offset;
9002 case ALLOCATION_TYPE_DEDICATED:
// GetMemory: the VkDeviceMemory backing this allocation.
9010 VkDeviceMemory VmaAllocation_T::GetMemory()
const
9014 case ALLOCATION_TYPE_BLOCK:
9015 return m_BlockAllocation.m_Block->GetDeviceMemory();
9016 case ALLOCATION_TYPE_DEDICATED:
9017 return m_DedicatedAllocation.m_hMemory;
9020 return VK_NULL_HANDLE;
// GetMappedData: CPU pointer if mapped; block allocations offset into the
// block's mapping, dedicated allocations use their own mapped pointer.
9024 void* VmaAllocation_T::GetMappedData()
const
9028 case ALLOCATION_TYPE_BLOCK:
9031 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
9032 VMA_ASSERT(pBlockData != VMA_NULL);
9033 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
9040 case ALLOCATION_TYPE_DEDICATED:
9041 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
9042 return m_DedicatedAllocation.m_pMappedData;
// CanBecomeLost: only block allocations created with the can-become-lost flag.
9049 bool VmaAllocation_T::CanBecomeLost()
const
9053 case ALLOCATION_TYPE_BLOCK:
9054 return m_BlockAllocation.m_CanBecomeLost;
9055 case ALLOCATION_TYPE_DEDICATED:
// MakeLost: CAS loop on the last-use frame index; fails if already lost or the
// allocation was used within frameInUseCount frames of currentFrameIndex.
9063 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9065 VMA_ASSERT(CanBecomeLost());
9071 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
9074 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
9079 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
9085 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
9095 #if VMA_STATS_STRING_ENABLED
// Human-readable names indexed by VmaSuballocationType (entries elided here).
9098 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// PrintParameters: emits this allocation's fields as JSON key/value pairs.
9107 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const
9109 json.WriteString(
"Type");
9110 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
9112 json.WriteString(
"Size");
9113 json.WriteNumber(m_Size);
9115 if(m_pUserData != VMA_NULL)
9117 json.WriteString(
"UserData");
9118 if(IsUserDataString())
9120 json.WriteString((
const char*)m_pUserData);
9125 json.ContinueString_Pointer(m_pUserData);
9130 json.WriteString(
"CreationFrameIndex");
9131 json.WriteNumber(m_CreationFrameIndex);
9133 json.WriteString(
"LastUseFrameIndex");
9134 json.WriteNumber(GetLastUseFrameIndex());
9136 if(m_BufferImageUsage != 0)
9138 json.WriteString(
"Usage");
9139 json.WriteNumber(m_BufferImageUsage);
9145 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
9147 VMA_ASSERT(IsUserDataString());
9148 VmaFreeString(hAllocator->GetAllocationCallbacks(), (
char*)m_pUserData);
9149 m_pUserData = VMA_NULL;
// NOTE(review): map-count bookkeeping. The low 7 bits of m_MapCount are the map
// reference count; MAP_COUNT_FLAG_PERSISTENT_MAP is a flag bit on top. Interior
// increment/decrement lines are elided by the extraction; code left byte-identical.
// BlockAllocMap: bump map refcount, capped at 0x7F.
9152 void VmaAllocation_T::BlockAllocMap()
9154 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
9156 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
9162 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// BlockAllocUnmap: decrement; asserts on unbalanced unmap.
9166 void VmaAllocation_T::BlockAllocUnmap()
9168 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
9170 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
9176 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// DedicatedAllocMap: if already mapped, reuse the cached pointer; otherwise
// call vkMapMemory and cache the result in m_pMappedData.
9180 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
9182 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
9186 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
9188 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
9189 *ppData = m_DedicatedAllocation.m_pMappedData;
9195 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
9196 return VK_ERROR_MEMORY_MAP_FAILED;
9201 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9202 hAllocator->m_hDevice,
9203 m_DedicatedAllocation.m_hMemory,
9208 if(result == VK_SUCCESS)
9210 m_DedicatedAllocation.m_pMappedData = *ppData;
// DedicatedAllocUnmap: on last unmap, clear the cached pointer and vkUnmapMemory.
9217 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
9219 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
9221 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
9226 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
9227 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
9228 hAllocator->m_hDevice,
9229 m_DedicatedAllocation.m_hMemory);
9234 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// NOTE(review): VmaPrintStatInfo serializes one VmaStatInfo as JSON; all the
// WriteNumber argument lines are elided by the extraction — only the key
// strings survive. Code left byte-identical, comments only.
9238 #if VMA_STATS_STRING_ENABLED
9240 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
9244 json.WriteString(
"Blocks");
9247 json.WriteString(
"Allocations");
9250 json.WriteString(
"UnusedRanges");
9253 json.WriteString(
"UsedBytes");
9256 json.WriteString(
"UnusedBytes");
// Allocation/unused-range size statistics emitted as nested single-line
// objects with Min/Avg/Max keys.
9261 json.WriteString(
"AllocationSize");
9262 json.BeginObject(
true);
9263 json.WriteString(
"Min");
9265 json.WriteString(
"Avg");
9267 json.WriteString(
"Max");
9274 json.WriteString(
"UnusedRangeSize");
9275 json.BeginObject(
true);
9276 json.WriteString(
"Min");
9278 json.WriteString(
"Avg");
9280 json.WriteString(
"Max");
// Comparator for the sorted free-suballocation list: orders iterators by the
// size of the suballocation they point to; the second overload supports
// heterogeneous lookup against a plain VkDeviceSize.
9290 struct VmaSuballocationItemSizeLess
9293 const VmaSuballocationList::iterator lhs,
9294 const VmaSuballocationList::iterator rhs)
const
9296 return lhs->size < rhs->size;
9299 const VmaSuballocationList::iterator lhs,
9300 VkDeviceSize rhsSize)
const
9302 return lhs->size < rhsSize;
// NOTE(review): base-class PrintDetailedMap_* helpers plus VmaBlockMetadata_Generic
// construction/initialization. Braces and some member-initializer lines are elided;
// code left byte-identical, comments only.
9310 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
9312 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
9316 #if VMA_STATS_STRING_ENABLED
// Emits the common header of a block's JSON dump (totals + opens the
// "Suballocations" array).
9318 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
9319 VkDeviceSize unusedBytes,
9320 size_t allocationCount,
9321 size_t unusedRangeCount)
const
9325 json.WriteString(
"TotalBytes");
9326 json.WriteNumber(GetSize());
9328 json.WriteString(
"UnusedBytes");
9329 json.WriteNumber(unusedBytes);
9331 json.WriteString(
"Allocations");
9332 json.WriteNumber((uint64_t)allocationCount);
9334 json.WriteString(
"UnusedRanges");
9335 json.WriteNumber((uint64_t)unusedRangeCount);
9337 json.WriteString(
"Suballocations");
// One used suballocation: offset + the allocation's own parameters.
9341 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
9342 VkDeviceSize offset,
9345 json.BeginObject(
true);
9347 json.WriteString(
"Offset");
9348 json.WriteNumber(offset);
9350 hAllocation->PrintParameters(json);
// One free range: offset, type FREE, and size.
9355 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
9356 VkDeviceSize offset,
9357 VkDeviceSize size)
const
9359 json.BeginObject(
true);
9361 json.WriteString(
"Offset");
9362 json.WriteNumber(offset);
9364 json.WriteString(
"Type");
9365 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
9367 json.WriteString(
"Size");
9368 json.WriteNumber(size);
9373 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const
// Generic metadata: suballocation list + a by-size index of free ranges, both
// using the allocator's callbacks.
9384 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
9385 VmaBlockMetadata(hAllocator),
9388 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9389 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
9393 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Init: the whole block starts as one free suballocation covering [0, size),
// registered in the by-size index.
9397 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
9399 VmaBlockMetadata::Init(size);
9402 m_SumFreeSize = size;
9404 VmaSuballocation suballoc = {};
9405 suballoc.offset = 0;
9406 suballoc.size = size;
9407 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9408 suballoc.hAllocation = VK_NULL_HANDLE;
9410 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9411 m_Suballocations.push_back(suballoc);
9412 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
9414 m_FreeSuballocationsBySize.push_back(suballocItem);
// NOTE(review): invariant checker — walks all suballocations verifying contiguity,
// no two adjacent free ranges, handle/type agreement, and that the by-size free
// list is sorted and complete. Elided lines include braces and the final
// `return true;`. Code left byte-identical, comments only.
9417 bool VmaBlockMetadata_Generic::Validate()
const
9419 VMA_VALIDATE(!m_Suballocations.empty());
// Running totals recomputed from scratch and compared to cached members below.
9422 VkDeviceSize calculatedOffset = 0;
9424 uint32_t calculatedFreeCount = 0;
9426 VkDeviceSize calculatedSumFreeSize = 0;
9429 size_t freeSuballocationsToRegister = 0;
9431 bool prevFree =
false;
9433 for(
const auto& subAlloc : m_Suballocations)
// Suballocations must tile the block with no gaps or overlaps.
9436 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
9438 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two free ranges must never be adjacent (they would have been merged).
9440 VMA_VALIDATE(!prevFree || !currFree);
9442 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
9446 calculatedSumFreeSize += subAlloc.size;
9447 ++calculatedFreeCount;
9448 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9450 ++freeSuballocationsToRegister;
9454 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN)
9458 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
9459 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
9462 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
9465 calculatedOffset += subAlloc.size;
9466 prevFree = currFree;
9471 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
// The by-size index must be sorted ascending and contain only free ranges.
9473 VkDeviceSize lastSize = 0;
9474 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
9476 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
9479 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
9481 VMA_VALIDATE(suballocItem->size >= lastSize);
9483 lastSize = suballocItem->size;
9487 VMA_VALIDATE(ValidateFreeSuballocationList());
9488 VMA_VALIDATE(calculatedOffset == GetSize());
9489 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
9490 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range = last entry of the sorted by-size index; the elided
// else-branch presumably returns 0 when the index is empty — TODO confirm.
9495 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const
9497 if(!m_FreeSuballocationsBySize.empty())
9499 return m_FreeSuballocationsBySize.back()->size;
9507 bool VmaBlockMetadata_Generic::IsEmpty()
const
9509 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// NOTE(review): statistics accumulation and JSON dump. Most accumulation lines
// are elided by the extraction; code left byte-identical, comments only.
9512 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
9516 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9528 for(
const auto& suballoc : m_Suballocations)
9530 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// AddPoolStats: folds this block's totals into pool-level statistics.
9543 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const
9545 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9547 inoutStats.
size += GetSize();
9554 #if VMA_STATS_STRING_ENABLED
// PrintDetailedMap: header (used count = total - free), then one JSON entry per
// suballocation — free ranges and allocations rendered differently.
9556 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const
9558 PrintDetailedMap_Begin(json,
9560 m_Suballocations.size() - (
size_t)m_FreeCount,
9563 for(
const auto& suballoc : m_Suballocations)
9565 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
9567 PrintDetailedMap_UnusedRange(json, suballoc.offset, suballoc.size);
9571 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9575 PrintDetailedMap_End(json);
// NOTE(review): the allocation-search core. Large portions (strategy dispatch,
// CheckAllocation argument lists, early returns) are elided by the extraction;
// code left byte-identical, comments only.
// Searches for space for a new allocation; on success fills *pAllocationRequest.
// Three strategies: best-fit via binary search in the sorted free list, min-offset
// via linear scan, worst-fit scanning the free list backwards; optionally also
// considers making existing lost-capable allocations lost.
9580 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
9581 uint32_t currentFrameIndex,
9582 uint32_t frameInUseCount,
9583 VkDeviceSize bufferImageGranularity,
9584 VkDeviceSize allocSize,
9585 VkDeviceSize allocAlignment,
9587 VmaSuballocationType allocType,
9588 bool canMakeOtherLost,
9590 VmaAllocationRequest* pAllocationRequest)
9592 VMA_ASSERT(allocSize > 0);
9593 VMA_ASSERT(!upperAddress);
9594 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9595 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9596 VMA_HEAVY_ASSERT(Validate());
9598 pAllocationRequest->type = VmaAllocationRequestType::Normal;
// Early out: without make-lost, total free space (plus debug margins) must fit.
9601 if(canMakeOtherLost ==
false &&
9602 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
9608 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
9609 if(freeSuballocCount > 0)
// Best-fit: first free range >= required size, then scan forward.
9614 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
9615 m_FreeSuballocationsBySize.data(),
9616 m_FreeSuballocationsBySize.data() + freeSuballocCount,
9617 allocSize + 2 * VMA_DEBUG_MARGIN,
9618 VmaSuballocationItemSizeLess());
9619 size_t index = it - m_FreeSuballocationsBySize.data();
9620 for(; index < freeSuballocCount; ++index)
9625 bufferImageGranularity,
9629 m_FreeSuballocationsBySize[index],
9631 &pAllocationRequest->offset,
9632 &pAllocationRequest->itemsToMakeLostCount,
9633 &pAllocationRequest->sumFreeSize,
9634 &pAllocationRequest->sumItemSize))
9636 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Min-offset strategy (used by defragmentation): first fitting range in
// address order.
9641 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
9643 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9644 it != m_Suballocations.end();
9647 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
9650 bufferImageGranularity,
9656 &pAllocationRequest->offset,
9657 &pAllocationRequest->itemsToMakeLostCount,
9658 &pAllocationRequest->sumFreeSize,
9659 &pAllocationRequest->sumItemSize))
9661 pAllocationRequest->item = it;
// Worst-fit: scan the by-size list from the largest range downwards.
9669 for(
size_t index = freeSuballocCount; index--; )
9674 bufferImageGranularity,
9678 m_FreeSuballocationsBySize[index],
9680 &pAllocationRequest->offset,
9681 &pAllocationRequest->itemsToMakeLostCount,
9682 &pAllocationRequest->sumFreeSize,
9683 &pAllocationRequest->sumItemSize))
9685 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Fallback: consider requests that require making other allocations lost,
// keeping the candidate with the lowest cost (CalcCost).
9692 if(canMakeOtherLost)
9697 VmaAllocationRequest tmpAllocRequest = {};
9698 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
9699 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
9700 suballocIt != m_Suballocations.end();
9703 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
9704 suballocIt->hAllocation->CanBecomeLost())
9709 bufferImageGranularity,
9715 &tmpAllocRequest.offset,
9716 &tmpAllocRequest.itemsToMakeLostCount,
9717 &tmpAllocRequest.sumFreeSize,
9718 &tmpAllocRequest.sumItemSize))
9722 *pAllocationRequest = tmpAllocRequest;
9723 pAllocationRequest->item = suballocIt;
9726 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
9728 *pAllocationRequest = tmpAllocRequest;
9729 pAllocationRequest->item = suballocIt;
// MakeRequestedAllocationsLost: walks forward from the request's item, making
// the planned number of lost-capable allocations actually lost; fails (returns
// false, presumably — elided) if any cannot be made lost.
9742 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
9743 uint32_t currentFrameIndex,
9744 uint32_t frameInUseCount,
9745 VmaAllocationRequest* pAllocationRequest)
9747 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
9749 while(pAllocationRequest->itemsToMakeLostCount > 0)
9751 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
9753 ++pAllocationRequest->item;
9755 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9756 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
9757 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
9758 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9760 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
9761 --pAllocationRequest->itemsToMakeLostCount;
9769 VMA_HEAVY_ASSERT(Validate());
9770 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9771 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// NOTE(review): braces and the loop-increment line are elided; code left
// byte-identical, comments only.
// Makes every lost-capable allocation in this block lost; returns how many.
9776 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9778 uint32_t lostAllocationCount = 0;
9779 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9780 it != m_Suballocations.end();
9783 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
9784 it->hAllocation->CanBecomeLost() &&
9785 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9787 it = FreeSuballocation(it);
9788 ++lostAllocationCount;
9791 return lostAllocationCount;
// Verifies the debug magic values written immediately before and after every
// used suballocation (requires VMA_DEBUG_MARGIN / corruption detection).
9794 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
9796 for(
auto& suballoc : m_Suballocations)
9798 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9800 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9802 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9803 return VK_ERROR_VALIDATION_FAILED_EXT;
9805 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9807 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9808 return VK_ERROR_VALIDATION_FAILED_EXT;
// NOTE(review): commits a previously computed allocation request: the chosen free
// suballocation is converted to a used one, and any leftover space before/after
// becomes new free suballocations. Some bookkeeping lines are elided by the
// extraction; code left byte-identical, comments only.
9816 void VmaBlockMetadata_Generic::Alloc(
9817 const VmaAllocationRequest& request,
9818 VmaSuballocationType type,
9819 VkDeviceSize allocSize,
9822 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
9823 VMA_ASSERT(request.item != m_Suballocations.end());
9824 VmaSuballocation& suballoc = *request.item;
9826 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// paddingBegin/paddingEnd: unused space left in the free range around the
// aligned allocation.
9828 VMA_ASSERT(request.offset >= suballoc.offset);
9829 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
9830 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
9831 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The chosen range leaves the free-by-size index before being rewritten.
9835 UnregisterFreeSuballocation(request.item);
9837 suballoc.offset = request.offset;
9838 suballoc.size = allocSize;
9839 suballoc.type = type;
9840 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free suballocation inserted after the item.
9845 VmaSuballocation paddingSuballoc = {};
9846 paddingSuballoc.offset = request.offset + allocSize;
9847 paddingSuballoc.size = paddingEnd;
9848 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9849 VmaSuballocationList::iterator next = request.item;
9851 const VmaSuballocationList::iterator paddingEndItem =
9852 m_Suballocations.insert(next, paddingSuballoc);
9853 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free suballocation inserted before the item.
9859 VmaSuballocation paddingSuballoc = {};
9860 paddingSuballoc.offset = request.offset - paddingBegin;
9861 paddingSuballoc.size = paddingBegin;
9862 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9863 const VmaSuballocationList::iterator paddingBeginItem =
9864 m_Suballocations.insert(request.item, paddingSuballoc);
9865 RegisterFreeSuballocation(paddingBeginItem);
// Free-count accounting: one free range consumed, up to two created
// (the increments for the padding ranges are elided here).
9869 m_FreeCount = m_FreeCount - 1;
9870 if(paddingBegin > 0)
9878 m_SumFreeSize -= allocSize;
// Frees the suballocation owning `allocation`, found by linear search over the
// suballocation list. Asserts if the allocation does not belong to this block.
9881 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
9883 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9884 suballocItem != m_Suballocations.end();
9887 VmaSuballocation& suballoc = *suballocItem;
9888 if(suballoc.hAllocation == allocation)
// Found: convert to free, merge with free neighbors, then heavy-validate.
9890 FreeSuballocation(suballocItem);
9891 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the allocation was not in this block — a caller bug.
9895 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts exactly at `offset`, found by linear
// search. Asserts if no suballocation with that offset exists in this block.
9898 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
9900 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9901 suballocItem != m_Suballocations.end();
9904 VmaSuballocation& suballoc = *suballocItem;
9905 if(suballoc.offset == offset)
9907 FreeSuballocation(suballocItem);
// No suballocation at the requested offset — a caller bug.
9911 VMA_ASSERT(0 &&
"Not found!");
// Validation helper: checks invariants of m_FreeSuballocationsBySize —
// every registered item is FREE, at least the minimum registrable size,
// and the vector is sorted by size in non-decreasing order.
9914 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const
9916 VkDeviceSize lastSize = 0;
9917 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
9919 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
9921 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
9922 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
// Sorted ascending by size: each entry must be >= the previous one.
9923 VMA_VALIDATE(it->size >= lastSize);
9924 lastSize = it->size;
// Checks whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at suballocItem, writing the resulting offset to *pOffset.
// Two major paths:
//  - canMakeOtherLost == true: the candidate region may span multiple
//    suballocations; non-free ones are counted into *itemsToMakeLostCount if
//    their allocations can become lost (not used within frameInUseCount frames).
//  - canMakeOtherLost == false: suballocItem must be a single FREE
//    suballocation large enough for the request.
// In both paths, bufferImageGranularity conflicts with neighbors on the same
// "page" force extra alignment (previous neighbors) or outright rejection /
// lost-marking (next neighbors).
// NOTE(review): return statements and braces are elided in this extract;
// consult the full source for the exact success/failure returns.
9929 bool VmaBlockMetadata_Generic::CheckAllocation(
9930 uint32_t currentFrameIndex,
9931 uint32_t frameInUseCount,
9932 VkDeviceSize bufferImageGranularity,
9933 VkDeviceSize allocSize,
9934 VkDeviceSize allocAlignment,
9935 VmaSuballocationType allocType,
9936 VmaSuballocationList::const_iterator suballocItem,
9937 bool canMakeOtherLost,
9938 VkDeviceSize* pOffset,
9939 size_t* itemsToMakeLostCount,
9940 VkDeviceSize* pSumFreeSize,
9941 VkDeviceSize* pSumItemSize)
const
9943 VMA_ASSERT(allocSize > 0);
9944 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9945 VMA_ASSERT(suballocItem != m_Suballocations.cend());
9946 VMA_ASSERT(pOffset != VMA_NULL);
9948 *itemsToMakeLostCount = 0;
// ---- Path 1: allowed to make other allocations lost. ----
9952 if(canMakeOtherLost)
9954 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9956 *pSumFreeSize = suballocItem->size;
// Starting suballocation is occupied: usable only if its allocation can be
// made lost (old enough relative to currentFrameIndex).
9960 if(suballocItem->hAllocation->CanBecomeLost() &&
9961 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9963 ++*itemsToMakeLostCount;
9964 *pSumItemSize = suballocItem->size;
// Quick reject: not enough space from this offset to the end of the block.
9973 if(GetSize() - suballocItem->offset < allocSize)
// Start at the suballocation's offset, then apply debug margin and alignment.
9979 *pOffset = suballocItem->offset;
9982 if(VMA_DEBUG_MARGIN > 0)
9984 *pOffset += VMA_DEBUG_MARGIN;
9988 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Check previous suballocations for bufferImageGranularity conflicts; if any
// shares a page and conflicts by type, bump alignment up to the granularity.
9992 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
9994 bool bufferImageGranularityConflict =
false;
9995 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
9996 while(prevSuballocItem != m_Suballocations.cbegin())
9999 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
10000 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
10002 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10004 bufferImageGranularityConflict =
true;
10012 if(bufferImageGranularityConflict)
10014 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
10020 if(*pOffset >= suballocItem->offset + suballocItem->size)
// Total space required from suballocItem->offset, including begin padding
// (from alignment) and the end debug margin.
10026 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
10029 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
10031 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
10033 if(suballocItem->offset + totalSize > GetSize())
// Walk forward over as many suballocations as needed to cover totalSize,
// accumulating free size and lost-candidate sizes as we go.
10040 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
10041 if(totalSize > suballocItem->size)
10043 VkDeviceSize remainingSize = totalSize - suballocItem->size;
10044 while(remainingSize > 0)
10046 ++lastSuballocItem;
10047 if(lastSuballocItem == m_Suballocations.cend())
10051 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
10053 *pSumFreeSize += lastSuballocItem->size;
10057 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
10058 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
10059 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10061 ++*itemsToMakeLostCount;
10062 *pSumItemSize += lastSuballocItem->size;
10069 remainingSize = (lastSuballocItem->size < remainingSize) ?
10070 remainingSize - lastSuballocItem->size : 0;
// Check following suballocations for granularity conflicts; conflicting
// neighbors must themselves be candidates to become lost.
10076 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
10078 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
10079 ++nextSuballocItem;
10080 while(nextSuballocItem != m_Suballocations.cend())
10082 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
10083 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10085 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10087 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
10088 if(nextSuballoc.hAllocation->CanBecomeLost() &&
10089 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10091 ++*itemsToMakeLostCount;
10104 ++nextSuballocItem;
// ---- Path 2: cannot make others lost — need a single free suballocation. ----
10110 const VmaSuballocation& suballoc = *suballocItem;
10111 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10113 *pSumFreeSize = suballoc.size;
// Quick reject: the free range is smaller than the request.
10116 if(suballoc.size < allocSize)
10122 *pOffset = suballoc.offset;
10125 if(VMA_DEBUG_MARGIN > 0)
10127 *pOffset += VMA_DEBUG_MARGIN;
10131 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same previous-neighbor granularity check as in path 1.
10135 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
10137 bool bufferImageGranularityConflict =
false;
10138 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
10139 while(prevSuballocItem != m_Suballocations.cbegin())
10141 --prevSuballocItem;
10142 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
10143 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
10145 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10147 bufferImageGranularityConflict =
true;
10155 if(bufferImageGranularityConflict)
10157 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Fit check: begin padding + allocation + end margin inside this free range.
10162 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
10165 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
10168 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Next-neighbor granularity check; here a conflict rejects the placement
// (elided lines presumably return false) since nothing can be made lost.
10175 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
10177 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
10178 ++nextSuballocItem;
10179 while(nextSuballocItem != m_Suballocations.cend())
10181 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
10182 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10184 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10194 ++nextSuballocItem;
// Merges the free suballocation `item` with the free suballocation that
// immediately follows it: item absorbs the next item's size and the next
// item is erased from the list.
// NOTE(review): the line advancing nextItem past `item` (between original
// lines 10208 and 10210) is missing from this extract — presumably
// `++nextItem;` — confirm against the full source.
10203 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
10205 VMA_ASSERT(item != m_Suballocations.end());
10206 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10208 VmaSuballocationList::iterator nextItem = item;
// Both suballocations being merged must be free.
10210 VMA_ASSERT(nextItem != m_Suballocations.end());
10211 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
10213 item->size += nextItem->size;
10215 m_Suballocations.erase(nextItem);
// Converts the given suballocation to FREE, updates the aggregate free-size
// counter, coalesces it with free neighbors (next and/or previous), and
// registers the resulting free range by size. Returns an iterator to the
// suballocation that now represents the (possibly merged) free range.
10218 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
10221 VmaSuballocation& suballoc = *suballocItem;
10222 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10223 suballoc.hAllocation = VK_NULL_HANDLE;
10227 m_SumFreeSize += suballoc.size;
// Determine whether the neighbors on either side are also free.
10230 bool mergeWithNext =
false;
10231 bool mergeWithPrev =
false;
// NOTE(review): the advance of nextItem (original line ~10234, presumably
// `++nextItem;`) is elided in this extract.
10233 VmaSuballocationList::iterator nextItem = suballocItem;
10235 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
10237 mergeWithNext =
true;
10240 VmaSuballocationList::iterator prevItem = suballocItem;
10241 if(suballocItem != m_Suballocations.begin())
10244 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
10246 mergeWithPrev =
true;
// Merge with the next free neighbor: unregister it first, then absorb it.
10252 UnregisterFreeSuballocation(nextItem);
10253 MergeFreeWithNext(suballocItem);
// Merge with the previous free neighbor: it absorbs this item and is
// re-registered under its new, larger size.
10258 UnregisterFreeSuballocation(prevItem);
10259 MergeFreeWithNext(prevItem);
10260 RegisterFreeSuballocation(prevItem);
// No previous merge: register this item itself as the free range.
10265 RegisterFreeSuballocation(suballocItem);
10266 return suballocItem;
// Adds a free suballocation to m_FreeSuballocationsBySize (kept sorted by
// size), but only if it meets the minimum registrable size — smaller free
// ranges are tracked in the list but not in the by-size index.
10270 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
10272 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10273 VMA_ASSERT(item->size > 0);
10277 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
10279 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
10281 if(m_FreeSuballocationsBySize.empty())
10283 m_FreeSuballocationsBySize.push_back(item);
// Non-empty vector: insert at the position that keeps it sorted by size.
10287 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from m_FreeSuballocationsBySize. Uses binary
// search to find the first entry of equal size, then scans forward through
// the run of equal-sized entries to find the exact iterator. Asserts if the
// item should be registered (size >= minimum) but is not found.
10295 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
10297 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10298 VMA_ASSERT(item->size > 0);
10302 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
10304 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Binary search for the first registered entry not smaller than item->size.
10306 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
10307 m_FreeSuballocationsBySize.data(),
10308 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
10310 VmaSuballocationItemSizeLess());
10311 for(
size_t index = it - m_FreeSuballocationsBySize.data();
10312 index < m_FreeSuballocationsBySize.size();
10315 if(m_FreeSuballocationsBySize[index] == item)
10317 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the run of entries with equal size — otherwise the item
// cannot be present and the registry is corrupt.
10320 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
10322 VMA_ASSERT(0 &&
"Not found.");
// Heuristic used (per upstream VMA) to decide whether defragmentation of this
// block could run into bufferImageGranularity conflicts. Scans all non-free
// suballocations, tracking the minimum allocation alignment and whether any
// adjacent pair of types conflicts. Returns true if a type conflict was found
// or if every allocation's alignment already satisfies the granularity.
// inOutPrevSuballocType carries the last seen type across blocks.
10328 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
10329 VkDeviceSize bufferImageGranularity,
10330 VmaSuballocationType& inOutPrevSuballocType)
const
// Trivial cases: granularity of 1 never conflicts; an empty block has
// nothing to conflict with. (Return value on this path elided in extract.)
10332 if(bufferImageGranularity == 1 || IsEmpty())
10337 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
10338 bool typeConflictFound =
false;
10339 for(
const auto& suballoc : m_Suballocations)
10341 const VmaSuballocationType suballocType = suballoc.type;
10342 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
10344 minAlignment = VMA_MIN(minAlignment, suballoc.hAllocation->GetAlignment());
10345 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
10347 typeConflictFound =
true;
10349 inOutPrevSuballocType = suballocType;
10353 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Constructor for the linear (ring-buffer / stack / double-stack) block
// metadata. Both suballocation vectors use the allocator's callbacks; the
// block starts with vector 0 as the "1st" vector, an empty 2nd vector, and
// all null-item (freed-but-not-compacted) counters at zero.
10359 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
10360 VmaBlockMetadata(hAllocator),
10362 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10363 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10364 m_1stVectorIndex(0),
10365 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
10366 m_1stNullItemsBeginCount(0),
10367 m_1stNullItemsMiddleCount(0),
10368 m_2ndNullItemsCount(0)
// Destructor — empty; the suballocation vectors clean themselves up.
10372 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes the metadata for a block of the given size: delegates to the
// base class and marks the entire block as free.
10376 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
10378 VmaBlockMetadata::Init(size);
10379 m_SumFreeSize = size;
// Full consistency check of the linear metadata: verifies the 2nd-vector
// mode invariants, null-item counters, per-suballocation invariants
// (free <=> null handle, offsets monotonically increasing with debug
// margins, allocation offset/size agreeing with the handle), and finally
// that m_SumFreeSize matches block size minus the sum of used sizes.
// Traversal order matches address order: 2nd vector first when in
// RING_BUFFER mode, then 1st vector, then 2nd vector (reversed) when in
// DOUBLE_STACK mode.
10382 bool VmaBlockMetadata_Linear::Validate()
const
10384 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10385 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is empty exactly when its mode says so; ring-buffer mode
// requires a non-empty 1st vector (or an empty 2nd).
10387 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
10388 VMA_VALIDATE(!suballocations1st.empty() ||
10389 suballocations2nd.empty() ||
10390 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
10392 if(!suballocations1st.empty())
// First non-null item must be a real allocation; the vector never ends
// with a freed (null) entry.
10395 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
10397 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
10399 if(!suballocations2nd.empty())
10402 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
10405 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
10406 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
10408 VkDeviceSize sumUsedSize = 0;
10409 const size_t suballoc1stCount = suballocations1st.size();
// Running cursor through the block's address space; starts after the
// leading debug margin.
10410 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Pass 1 (ring-buffer mode only): 2nd vector occupies low addresses.
10412 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10414 const size_t suballoc2ndCount = suballocations2nd.size();
10415 size_t nullItem2ndCount = 0;
10416 for(
size_t i = 0; i < suballoc2ndCount; ++i)
10418 const VmaSuballocation& suballoc = suballocations2nd[i];
10419 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10421 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10422 VMA_VALIDATE(suballoc.offset >= offset);
10426 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10427 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10428 sumUsedSize += suballoc.size;
10432 ++nullItem2ndCount;
10435 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10438 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must be fully cleared entries.
10441 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
10443 const VmaSuballocation& suballoc = suballocations1st[i];
10444 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
10445 suballoc.hAllocation == VK_NULL_HANDLE);
10448 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Pass 2: remaining items of the 1st vector, counting middle null items.
10450 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
10452 const VmaSuballocation& suballoc = suballocations1st[i];
10453 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10455 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10456 VMA_VALIDATE(suballoc.offset >= offset);
10457 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
10461 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10462 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10463 sumUsedSize += suballoc.size;
10467 ++nullItem1stCount;
10470 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10472 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Pass 3 (double-stack mode only): 2nd vector occupies high addresses,
// iterated in reverse so offsets still increase.
10474 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10476 const size_t suballoc2ndCount = suballocations2nd.size();
10477 size_t nullItem2ndCount = 0;
10478 for(
size_t i = suballoc2ndCount; i--; )
10480 const VmaSuballocation& suballoc = suballocations2nd[i];
10481 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10483 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10484 VMA_VALIDATE(suballoc.offset >= offset);
10488 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10489 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10490 sumUsedSize += suballoc.size;
10494 ++nullItem2ndCount;
10497 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10500 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Final aggregate checks.
10503 VMA_VALIDATE(offset <= GetSize());
10504 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Number of live allocations: total entries in both vectors minus the
// freed-but-not-compacted (null) items tracked by the counters.
10509 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const
10511 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
10512 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Largest contiguous free range, computed per 2nd-vector mode:
//  - EMPTY: the gap before the first 1st-vector item vs. the gap after the
//    last one (the max of the two; combining lines elided in this extract).
//  - RING_BUFFER: the gap between the end of the 2nd vector (low addresses)
//    and the start of the 1st vector.
//  - DOUBLE_STACK: the gap between the end of the 1st vector and the start
//    of the top of the 2nd stack (high addresses).
10515 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const
10517 const VkDeviceSize size = GetSize();
10529 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10531 switch(m_2ndVectorMode)
10533 case SECOND_VECTOR_EMPTY:
10539 const size_t suballocations1stCount = suballocations1st.size();
10540 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
10541 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10542 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
// Candidates: space before the first allocation, space after the last.
10544 firstSuballoc.offset,
10545 size - (lastSuballoc.offset + lastSuballoc.size));
10549 case SECOND_VECTOR_RING_BUFFER:
10554 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10555 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
10556 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
10557 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
10561 case SECOND_VECTOR_DOUBLE_STACK:
10566 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10567 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
10568 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
10569 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Accumulates detailed statistics (used/unused ranges and sizes) for this
// linear block into outInfo, walking the address space in order:
//  1. 2nd vector (only in RING_BUFFER mode, at low addresses),
//  2. 1st vector,
//  3. 2nd vector in reverse (only in DOUBLE_STACK mode, at high addresses).
// Null (freed) entries are skipped; gaps between live allocations are
// counted as unused ranges. The lines that actually write into outInfo are
// elided in this extract — only the range-walking skeleton is visible.
10579 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
10581 const VkDeviceSize size = GetSize();
10582 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10583 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10584 const size_t suballoc1stCount = suballocations1st.size();
10585 const size_t suballoc2ndCount = suballocations2nd.size();
// Cursor through the block's address space.
10596 VkDeviceSize lastOffset = 0;
10598 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// In ring-buffer mode the 2nd vector ends where the 1st vector begins.
10600 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10601 size_t nextAlloc2ndIndex = 0;
10602 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) entries.
10605 while(nextAlloc2ndIndex < suballoc2ndCount &&
10606 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10608 ++nextAlloc2ndIndex;
10612 if(nextAlloc2ndIndex < suballoc2ndCount)
10614 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
10617 if(lastOffset < suballoc.offset)
10620 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10634 lastOffset = suballoc.offset + suballoc.size;
10635 ++nextAlloc2ndIndex;
// No more live 2nd-vector items: the rest up to the 1st vector is unused.
10641 if(lastOffset < freeSpace2ndTo1stEnd)
10643 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10651 lastOffset = freeSpace2ndTo1stEnd;
// Walk the 1st vector; its usable end is the bottom of the 2nd stack in
// double-stack mode, otherwise the end of the block.
10656 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10657 const VkDeviceSize freeSpace1stTo2ndEnd =
10658 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10659 while(lastOffset < freeSpace1stTo2ndEnd)
10662 while(nextAlloc1stIndex < suballoc1stCount &&
10663 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10665 ++nextAlloc1stIndex;
10669 if(nextAlloc1stIndex < suballoc1stCount)
10671 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10674 if(lastOffset < suballoc.offset)
10677 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10691 lastOffset = suballoc.offset + suballoc.size;
10692 ++nextAlloc1stIndex;
10698 if(lastOffset < freeSpace1stTo2ndEnd)
10700 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10708 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack mode: walk the 2nd vector top-down (reverse index order) so
// offsets still increase toward the end of the block.
10712 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10714 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10715 while(lastOffset < size)
10718 while(nextAlloc2ndIndex != SIZE_MAX &&
10719 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10721 --nextAlloc2ndIndex;
10725 if(nextAlloc2ndIndex != SIZE_MAX)
10727 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10730 if(lastOffset < suballoc.offset)
10733 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10747 lastOffset = suballoc.offset + suballoc.size;
10748 --nextAlloc2ndIndex;
10754 if(lastOffset < size)
10756 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates pool-level statistics for this linear block into inoutStats.
// Uses the same three-pass address-order walk as CalcAllocationStatInfo
// (2nd vector in ring-buffer mode, then 1st vector, then reversed 2nd vector
// in double-stack mode). The statements that update inoutStats inside the
// loops are elided in this extract.
// NOTE(review): nextAlloc2ndIndex here starts at m_1stNullItemsBeginCount
// (original line 10787) while the analogous loops elsewhere start at 0 —
// flagging for verification against the upstream source; not changing code
// in a comments-only edit.
10772 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const
10774 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10775 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10776 const VkDeviceSize size = GetSize();
10777 const size_t suballoc1stCount = suballocations1st.size();
10778 const size_t suballoc2ndCount = suballocations2nd.size();
10780 inoutStats.
size += size;
// Cursor through the block's address space.
10782 VkDeviceSize lastOffset = 0;
10784 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10786 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10787 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
10788 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) entries.
10791 while(nextAlloc2ndIndex < suballoc2ndCount &&
10792 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10794 ++nextAlloc2ndIndex;
10798 if(nextAlloc2ndIndex < suballoc2ndCount)
10800 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10803 if(lastOffset < suballoc.offset)
10806 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10817 lastOffset = suballoc.offset + suballoc.size;
10818 ++nextAlloc2ndIndex;
10823 if(lastOffset < freeSpace2ndTo1stEnd)
10826 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10833 lastOffset = freeSpace2ndTo1stEnd;
// Walk the 1st vector up to the bottom of the 2nd stack (double-stack
// mode) or the end of the block.
10838 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10839 const VkDeviceSize freeSpace1stTo2ndEnd =
10840 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10841 while(lastOffset < freeSpace1stTo2ndEnd)
10844 while(nextAlloc1stIndex < suballoc1stCount &&
10845 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10847 ++nextAlloc1stIndex;
10851 if(nextAlloc1stIndex < suballoc1stCount)
10853 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10856 if(lastOffset < suballoc.offset)
10859 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10870 lastOffset = suballoc.offset + suballoc.size;
10871 ++nextAlloc1stIndex;
10876 if(lastOffset < freeSpace1stTo2ndEnd)
10879 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10886 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack mode: walk the 2nd vector top-down.
10890 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10892 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10893 while(lastOffset < size)
10896 while(nextAlloc2ndIndex != SIZE_MAX &&
10897 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10899 --nextAlloc2ndIndex;
10903 if(nextAlloc2ndIndex != SIZE_MAX)
10905 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10908 if(lastOffset < suballoc.offset)
10911 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10922 lastOffset = suballoc.offset + suballoc.size;
10923 --nextAlloc2ndIndex;
10928 if(lastOffset < size)
10931 const VkDeviceSize unusedRangeSize = size - lastOffset;
10944 #if VMA_STATS_STRING_ENABLED
// Writes a detailed JSON map of this linear block (compiled only when
// VMA_STATS_STRING_ENABLED). Two phases:
//  FIRST PASS — counts allocations, unused ranges, and used bytes (needed up
//  front by PrintDetailedMap_Begin);
//  SECOND PASS — repeats the same address-order walk, emitting each
//  allocation and unused range via PrintDetailedMap_Allocation /
//  PrintDetailedMap_UnusedRange, and closes with PrintDetailedMap_End.
// Both passes use the usual three-segment order: 2nd vector (ring-buffer
// mode), 1st vector, reversed 2nd vector (double-stack mode).
10945 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const
10947 const VkDeviceSize size = GetSize();
10948 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10949 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10950 const size_t suballoc1stCount = suballocations1st.size();
10951 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: tally counts and byte totals.
10955 size_t unusedRangeCount = 0;
10956 VkDeviceSize usedBytes = 0;
10958 VkDeviceSize lastOffset = 0;
10960 size_t alloc2ndCount = 0;
10961 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10963 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10964 size_t nextAlloc2ndIndex = 0;
10965 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) entries.
10968 while(nextAlloc2ndIndex < suballoc2ndCount &&
10969 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10971 ++nextAlloc2ndIndex;
10975 if(nextAlloc2ndIndex < suballoc2ndCount)
10977 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10980 if(lastOffset < suballoc.offset)
10983 ++unusedRangeCount;
10989 usedBytes += suballoc.size;
10992 lastOffset = suballoc.offset + suballoc.size;
10993 ++nextAlloc2ndIndex;
10998 if(lastOffset < freeSpace2ndTo1stEnd)
11001 ++unusedRangeCount;
11005 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector segment of the first pass.
11010 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
11011 size_t alloc1stCount = 0;
11012 const VkDeviceSize freeSpace1stTo2ndEnd =
11013 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
11014 while(lastOffset < freeSpace1stTo2ndEnd)
11017 while(nextAlloc1stIndex < suballoc1stCount &&
11018 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
11020 ++nextAlloc1stIndex;
11024 if(nextAlloc1stIndex < suballoc1stCount)
11026 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
11029 if(lastOffset < suballoc.offset)
11032 ++unusedRangeCount;
11038 usedBytes += suballoc.size;
11041 lastOffset = suballoc.offset + suballoc.size;
11042 ++nextAlloc1stIndex;
11047 if(lastOffset < size)
11050 ++unusedRangeCount;
11054 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack segment of the first pass (reverse 2nd-vector walk).
11058 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11060 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
11061 while(lastOffset < size)
11064 while(nextAlloc2ndIndex != SIZE_MAX &&
11065 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11067 --nextAlloc2ndIndex;
11071 if(nextAlloc2ndIndex != SIZE_MAX)
11073 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11076 if(lastOffset < suballoc.offset)
11079 ++unusedRangeCount;
11085 usedBytes += suballoc.size;
11088 lastOffset = suballoc.offset + suballoc.size;
11089 --nextAlloc2ndIndex;
11094 if(lastOffset < size)
11097 ++unusedRangeCount;
// SECOND PASS: emit the JSON using the totals gathered above.
11106 const VkDeviceSize unusedBytes = size - usedBytes;
11107 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
11112 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11114 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
11115 size_t nextAlloc2ndIndex = 0;
11116 while(lastOffset < freeSpace2ndTo1stEnd)
11119 while(nextAlloc2ndIndex < suballoc2ndCount &&
11120 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11122 ++nextAlloc2ndIndex;
11126 if(nextAlloc2ndIndex < suballoc2ndCount)
11128 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11131 if(lastOffset < suballoc.offset)
11134 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11135 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11140 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11143 lastOffset = suballoc.offset + suballoc.size;
11144 ++nextAlloc2ndIndex;
11149 if(lastOffset < freeSpace2ndTo1stEnd)
11152 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
11153 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11157 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector segment of the second pass.
11162 nextAlloc1stIndex = m_1stNullItemsBeginCount;
11163 while(lastOffset < freeSpace1stTo2ndEnd)
11166 while(nextAlloc1stIndex < suballoc1stCount &&
11167 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
11169 ++nextAlloc1stIndex;
11173 if(nextAlloc1stIndex < suballoc1stCount)
11175 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
11178 if(lastOffset < suballoc.offset)
11181 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11182 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11187 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11190 lastOffset = suballoc.offset + suballoc.size;
11191 ++nextAlloc1stIndex;
11196 if(lastOffset < freeSpace1stTo2ndEnd)
11199 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
11200 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11204 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack segment of the second pass.
11208 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11210 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
11211 while(lastOffset < size)
11214 while(nextAlloc2ndIndex != SIZE_MAX &&
11215 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11217 --nextAlloc2ndIndex;
11221 if(nextAlloc2ndIndex != SIZE_MAX)
11223 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11226 if(lastOffset < suballoc.offset)
11229 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11230 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11235 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11238 lastOffset = suballoc.offset + suballoc.size;
11239 --nextAlloc2ndIndex;
11244 if(lastOffset < size)
11247 const VkDeviceSize unusedRangeSize = size - lastOffset;
11248 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11257 PrintDetailedMap_End(json);
// Entry point for building an allocation request in a linear block: after
// validating arguments, dispatches on `upperAddress` (parameter line elided
// in this extract, visible in the ternary below) to the upper-address
// (double-stack top) or lower-address variant. Returns whatever the chosen
// variant returns.
11261 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
11262 uint32_t currentFrameIndex,
11263 uint32_t frameInUseCount,
11264 VkDeviceSize bufferImageGranularity,
11265 VkDeviceSize allocSize,
11266 VkDeviceSize allocAlignment,
11268 VmaSuballocationType allocType,
11269 bool canMakeOtherLost,
11271 VmaAllocationRequest* pAllocationRequest)
11273 VMA_ASSERT(allocSize > 0);
11274 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
11275 VMA_ASSERT(pAllocationRequest != VMA_NULL);
11276 VMA_HEAVY_ASSERT(Validate());
11277 return upperAddress ?
11278 CreateAllocationRequest_UpperAddress(
11279 currentFrameIndex, frameInUseCount, bufferImageGranularity,
11280 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
11281 CreateAllocationRequest_LowerAddress(
11282 currentFrameIndex, frameInUseCount, bufferImageGranularity,
11283 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the top of the block (upper address),
// growing the 2nd vector downward as a stack. Fails (asserts) if the block
// is already used as a ring buffer. Computes a candidate offset below the
// last 2nd-stack item (or the block end), adjusts down for debug margin,
// alignment, and bufferImageGranularity conflicts with 2nd-stack neighbors,
// then checks it doesn't collide with the end of the 1st vector (including
// granularity conflicts with 1st-vector items). On success fills
// *pAllocationRequest with offset/sumFreeSize/type.
11286 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
11287 uint32_t currentFrameIndex,
11288 uint32_t frameInUseCount,
11289 VkDeviceSize bufferImageGranularity,
11290 VkDeviceSize allocSize,
11291 VkDeviceSize allocAlignment,
11292 VmaSuballocationType allocType,
11293 bool canMakeOtherLost,
11295 VmaAllocationRequest* pAllocationRequest)
11297 const VkDeviceSize size = GetSize();
11298 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11299 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address (double-stack) use is incompatible with ring-buffer mode.
11301 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11303 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
// Trivial reject: request larger than the whole block.
11308 if(allocSize > size)
// Candidate base: just below the current top of the 2nd stack (or the
// block end when the stack is empty).
11312 VkDeviceSize resultBaseOffset = size - allocSize;
11313 if(!suballocations2nd.empty())
11315 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11316 resultBaseOffset = lastSuballoc.offset - allocSize;
11317 if(allocSize > lastSuballoc.offset)
11324 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve the debug margin below the candidate (reject on underflow).
11327 if(VMA_DEBUG_MARGIN > 0)
11329 if(resultOffset < VMA_DEBUG_MARGIN)
11333 resultOffset -= VMA_DEBUG_MARGIN;
// Align downward — allocations at upper addresses grow toward 0.
11337 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Granularity conflicts against 2nd-stack items above the candidate force
// further downward alignment to the granularity.
11341 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
11343 bool bufferImageGranularityConflict =
false;
11344 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11346 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11347 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11349 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
11351 bufferImageGranularityConflict =
true;
11359 if(bufferImageGranularityConflict)
11361 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// The candidate must not overlap the end of the 1st vector (plus margin).
11366 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
11367 suballocations1st.back().offset + suballocations1st.back().size :
11369 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity conflicts with 1st-vector items sharing a page.
11373 if(bufferImageGranularity > 1)
11375 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11377 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11378 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11380 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request. Free space is everything between the end of
// the 1st vector and the end of this allocation's base region.
11394 pAllocationRequest->offset = resultOffset;
11395 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
11396 pAllocationRequest->sumItemSize = 0;
11398 pAllocationRequest->itemsToMakeLostCount = 0;
11399 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to find a place for a new allocation at "lower address":
// either at the end of the 1st vector (empty / double-stack mode) or at the
// end of the 2nd vector used as a ring buffer, possibly making old
// allocations lost. Returns via pAllocationRequest.
// NOTE(review): this excerpt has lines elided (the embedded original line
// numbers jump), so braces, returns and some statements are not visible here.
11406 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
11407 uint32_t currentFrameIndex,
11408 uint32_t frameInUseCount,
11409 VkDeviceSize bufferImageGranularity,
11410 VkDeviceSize allocSize,
11411 VkDeviceSize allocAlignment,
11412 VmaSuballocationType allocType,
11413 bool canMakeOtherLost,
11415 VmaAllocationRequest* pAllocationRequest)
11417 const VkDeviceSize size = GetSize();
11418 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11419 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Case 1: try to allocate at the end of the 1st vector.
11421 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
// Start right after the last suballocation of the 1st vector (or at 0).
11425 VkDeviceSize resultBaseOffset = 0;
11426 if(!suballocations1st.empty())
11428 const VmaSuballocation& lastSuballoc = suballocations1st.back();
11429 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11433 VkDeviceSize resultOffset = resultBaseOffset;
// Leave a debug margin before the allocation, then align up.
11436 if(VMA_DEBUG_MARGIN > 0)
11438 resultOffset += VMA_DEBUG_MARGIN;
11442 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Respect bufferImageGranularity: scan previous suballocations backwards
// and re-align if a conflicting type shares the same "page".
11446 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty())
11448 bool bufferImageGranularityConflict =
false;
11449 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11451 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11452 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11454 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11456 bufferImageGranularityConflict =
true;
11464 if(bufferImageGranularityConflict)
11466 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity)
// Free space ends where the 2nd (upper) stack begins, or at block size.
11470 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
11471 suballocations2nd.back().offset : size;
11474 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Check granularity conflicts against the 2nd vector in double-stack mode.
11478 if((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11480 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11482 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11483 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11485 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: fill the request for an end-of-1st allocation.
11499 pAllocationRequest->offset = resultOffset;
11500 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
11501 pAllocationRequest->sumItemSize = 0;
11503 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
11504 pAllocationRequest->itemsToMakeLostCount = 0;
// Case 2: wrap around — allocate at the end of the 2nd vector (ring buffer).
11511 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11513 VMA_ASSERT(!suballocations1st.empty());
11515 VkDeviceSize resultBaseOffset = 0;
11516 if(!suballocations2nd.empty())
11518 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11519 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11523 VkDeviceSize resultOffset = resultBaseOffset;
11526 if(VMA_DEBUG_MARGIN > 0)
11528 resultOffset += VMA_DEBUG_MARGIN;
11532 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Same granularity-conflict re-alignment as above, but against 2nd vector.
11536 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
11538 bool bufferImageGranularityConflict =
false;
11539 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
11541 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
11542 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11544 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11546 bufferImageGranularityConflict =
true;
11554 if(bufferImageGranularityConflict)
11556 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
11560 pAllocationRequest->itemsToMakeLostCount = 0;
11561 pAllocationRequest->sumItemSize = 0;
11562 size_t index1st = m_1stNullItemsBeginCount;
// Optionally count 1st-vector allocations that would have to become lost
// to make room for this allocation (those overlapping the new range).
11564 if(canMakeOtherLost)
11566 while(index1st < suballocations1st.size() &&
11567 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
11570 const VmaSuballocation& suballoc = suballocations1st[index1st];
11571 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
11577 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11578 if(suballoc.hAllocation->CanBecomeLost() &&
11579 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11581 ++pAllocationRequest->itemsToMakeLostCount;
11582 pAllocationRequest->sumItemSize += suballoc.size;
// Also lose allocations sharing a granularity page with the new one.
11594 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
11596 while(index1st < suballocations1st.size())
11598 const VmaSuballocation& suballoc = suballocations1st[index1st];
11599 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
11601 if(suballoc.hAllocation != VK_NULL_HANDLE)
11604 if(suballoc.hAllocation->CanBecomeLost() &&
11605 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11607 ++pAllocationRequest->itemsToMakeLostCount;
11608 pAllocationRequest->sumItemSize += suballoc.size;
// Special case that the algorithm deliberately does not support.
11626 if(index1st == suballocations1st.size() &&
11627 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
11630 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// The allocation fits either before the remaining 1st items or in the tail.
11635 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
11636 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
11640 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
11642 for(
size_t nextSuballocIndex = index1st;
11643 nextSuballocIndex < suballocations1st.size();
11644 nextSuballocIndex++)
11646 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
11647 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11649 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: fill the request for an end-of-2nd (ring-buffer) allocation.
11663 pAllocationRequest->offset = resultOffset;
11664 pAllocationRequest->sumFreeSize =
11665 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
11667 - pAllocationRequest->sumItemSize;
11668 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Makes the allocations counted in pAllocationRequest->itemsToMakeLostCount
// actually lost, walking the 1st vector and then (in ring-buffer mode) the
// 2nd. Calls CleanupAfterFree() at the end.
// NOTE(review): lines are elided in this excerpt (embedded numbering jumps);
// loop-advance and early-return statements are not visible here.
11677 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
11678 uint32_t currentFrameIndex,
11679 uint32_t frameInUseCount,
11680 VmaAllocationRequest* pAllocationRequest)
// Nothing requested to lose — nothing to do.
11682 if(pAllocationRequest->itemsToMakeLostCount == 0)
11687 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
// Start in the 1st vector after the leading run of null items.
11690 SuballocationVectorType* suballocations = &AccessSuballocations1st();
11691 size_t index = m_1stNullItemsBeginCount;
11692 size_t madeLostCount = 0;
11693 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// When the current vector is exhausted, switch to the 2nd (ring buffer).
11695 if(index == suballocations->size())
11699 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11701 suballocations = &AccessSuballocations2nd();
11705 VMA_ASSERT(!suballocations->empty());
11707 VmaSuballocation& suballoc = (*suballocations)[index];
11708 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11710 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11711 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
11712 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// The slot becomes a free (null) item; update the matching null counter.
11714 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11715 suballoc.hAllocation = VK_NULL_HANDLE;
11716 m_SumFreeSize += suballoc.size;
11717 if(suballocations == &AccessSuballocations1st())
11719 ++m_1stNullItemsMiddleCount;
11723 ++m_2ndNullItemsCount;
11735 CleanupAfterFree();
11741 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
11743 uint32_t lostAllocationCount = 0;
11745 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11746 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11748 VmaSuballocation& suballoc = suballocations1st[i];
11749 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11750 suballoc.hAllocation->CanBecomeLost() &&
11751 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11753 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11754 suballoc.hAllocation = VK_NULL_HANDLE;
11755 ++m_1stNullItemsMiddleCount;
11756 m_SumFreeSize += suballoc.size;
11757 ++lostAllocationCount;
11761 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11762 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11764 VmaSuballocation& suballoc = suballocations2nd[i];
11765 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11766 suballoc.hAllocation->CanBecomeLost() &&
11767 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11769 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11770 suballoc.hAllocation = VK_NULL_HANDLE;
11771 ++m_2ndNullItemsCount;
11772 m_SumFreeSize += suballoc.size;
11773 ++lostAllocationCount;
11777 if(lostAllocationCount)
11779 CleanupAfterFree();
11782 return lostAllocationCount;
11785 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
11787 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11788 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11790 const VmaSuballocation& suballoc = suballocations1st[i];
11791 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11793 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11795 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11796 return VK_ERROR_VALIDATION_FAILED_EXT;
11798 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11800 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11801 return VK_ERROR_VALIDATION_FAILED_EXT;
11806 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11807 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11809 const VmaSuballocation& suballoc = suballocations2nd[i];
11810 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11812 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11814 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11815 return VK_ERROR_VALIDATION_FAILED_EXT;
11817 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11819 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11820 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously created allocation request: appends the new
// suballocation to the proper vector depending on request.type and updates
// m_2ndVectorMode / m_SumFreeSize.
// NOTE(review): this excerpt elides lines (break statements, braces), so
// switch-case fallthrough cannot be judged from what is visible here.
11828 void VmaBlockMetadata_Linear::Alloc(
11829 const VmaAllocationRequest& request,
11830 VmaSuballocationType type,
11831 VkDeviceSize allocSize,
11834 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
11836 switch(request.type)
// Upper address: push onto the 2nd vector used as a top-down stack.
11838 case VmaAllocationRequestType::UpperAddress:
11840 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
11841 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
11842 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11843 suballocations2nd.push_back(newSuballoc);
11844 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// End of 1st vector: must come after its last item and fit in the block.
11847 case VmaAllocationRequestType::EndOf1st:
11849 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11851 VMA_ASSERT(suballocations1st.empty() ||
11852 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
11854 VMA_ASSERT(request.offset + allocSize <= GetSize());
11856 suballocations1st.push_back(newSuballoc);
// End of 2nd vector: ring-buffer wrap-around, must not collide with 1st.
11859 case VmaAllocationRequestType::EndOf2nd:
11861 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11863 VMA_ASSERT(!suballocations1st.empty() &&
11864 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
11865 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11867 switch(m_2ndVectorMode)
11869 case SECOND_VECTOR_EMPTY:
// First wrap-around: the 2nd vector now becomes the ring buffer.
11871 VMA_ASSERT(suballocations2nd.empty());
11872 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
11874 case SECOND_VECTOR_RING_BUFFER:
11876 VMA_ASSERT(!suballocations2nd.empty());
11878 case SECOND_VECTOR_DOUBLE_STACK:
11879 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
11885 suballocations2nd.push_back(newSuballoc);
11889 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
// The new suballocation consumes free space.
11892 m_SumFreeSize -= newSuballoc.size;
11895 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
11897 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at the given offset. Fast paths first (first item
// of 1st vector, last item of whichever vector the offset can end in), then
// binary search in the middle of each vector. Calls CleanupAfterFree() after
// every successful free.
// NOTE(review): lines are elided in this excerpt; early returns after each
// CleanupAfterFree() call are not visible here.
11900 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
11902 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11903 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Fast path: the first live item of the 1st vector.
11905 if(!suballocations1st.empty())
11908 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
11909 if(firstSuballoc.offset == offset)
11911 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11912 firstSuballoc.hAllocation = VK_NULL_HANDLE;
11913 m_SumFreeSize += firstSuballoc.size;
11914 ++m_1stNullItemsBeginCount;
11915 CleanupAfterFree();
// Fast path: the last item of the 2nd vector (ring buffer or double stack).
11921 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
11922 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11924 VmaSuballocation& lastSuballoc = suballocations2nd.back();
11925 if(lastSuballoc.offset == offset)
11927 m_SumFreeSize += lastSuballoc.size;
11928 suballocations2nd.pop_back();
11929 CleanupAfterFree();
// Fast path: the last item of the 1st vector when there is no 2nd vector.
11934 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
11936 VmaSuballocation& lastSuballoc = suballocations1st.back();
11937 if(lastSuballoc.offset == offset)
11939 m_SumFreeSize += lastSuballoc.size;
11940 suballocations1st.pop_back();
11941 CleanupAfterFree();
// Slow path: binary search in the middle of the 1st vector (sorted by offset).
11948 VmaSuballocation refSuballoc;
11949 refSuballoc.offset = offset;
11951 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
11952 suballocations1st.begin() + m_1stNullItemsBeginCount,
11953 suballocations1st.end(),
11955 VmaSuballocationOffsetLess());
11956 if(it != suballocations1st.end())
11958 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11959 it->hAllocation = VK_NULL_HANDLE;
11960 ++m_1stNullItemsMiddleCount;
11961 m_SumFreeSize += it->size;
11962 CleanupAfterFree();
// Slow path: binary search in the 2nd vector. Ordering depends on mode:
// ascending offsets for ring buffer, descending for double stack.
11967 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
11970 VmaSuballocation refSuballoc;
11971 refSuballoc.offset = offset;
11973 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
11974 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
11975 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
11976 if(it != suballocations2nd.end())
11978 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11979 it->hAllocation = VK_NULL_HANDLE;
11980 ++m_2ndNullItemsCount;
11981 m_SumFreeSize += it->size;
11982 CleanupAfterFree();
// No suballocation with this offset exists — caller bug.
11987 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
11990 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const
11992 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11993 const size_t suballocCount = AccessSuballocations1st().size();
11994 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Normalizes internal state after a free: trims null items from the edges of
// both vectors, optionally compacts the 1st vector, and swaps the vectors
// when the 1st becomes empty in ring-buffer mode.
// NOTE(review): this excerpt elides lines (embedded numbering jumps); the
// branch conditions around the initial clear and several loop-advance
// statements are not visible here.
11997 void VmaBlockMetadata_Linear::CleanupAfterFree()
11999 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
12000 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Full reset path — everything freed.
12004 suballocations1st.clear();
12005 suballocations2nd.clear();
12006 m_1stNullItemsBeginCount = 0;
12007 m_1stNullItemsMiddleCount = 0;
12008 m_2ndNullItemsCount = 0;
12009 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
12013 const size_t suballoc1stCount = suballocations1st.size();
12014 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
12015 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the leading null run of the 1st vector over middle nulls.
12018 while(m_1stNullItemsBeginCount < suballoc1stCount &&
12019 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
12021 ++m_1stNullItemsBeginCount;
12022 --m_1stNullItemsMiddleCount;
// Pop trailing null items off the 1st vector.
12026 while(m_1stNullItemsMiddleCount > 0 &&
12027 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
12029 --m_1stNullItemsMiddleCount;
12030 suballocations1st.pop_back();
// Pop trailing null items off the 2nd vector.
12034 while(m_2ndNullItemsCount > 0 &&
12035 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
12037 --m_2ndNullItemsCount;
12038 suballocations2nd.pop_back();
// Remove leading null items of the 2nd vector.
12042 while(m_2ndNullItemsCount > 0 &&
12043 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
12045 --m_2ndNullItemsCount;
12046 VmaVectorRemove(suballocations2nd, 0);
// Compact the 1st vector in place when nulls dominate (see ShouldCompact1st).
12049 if(ShouldCompact1st())
12051 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
12052 size_t srcIndex = m_1stNullItemsBeginCount;
12053 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
12055 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
12059 if(dstIndex != srcIndex)
12061 suballocations1st[dstIndex] = suballocations1st[srcIndex];
12065 suballocations1st.resize(nonNullItemCount);
12066 m_1stNullItemsBeginCount = 0;
12067 m_1stNullItemsMiddleCount = 0;
// The 2nd vector emptied out — go back to single-vector mode.
12071 if(suballocations2nd.empty())
12073 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// The 1st vector has no live items left.
12077 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
12079 suballocations1st.clear();
12080 m_1stNullItemsBeginCount = 0;
// Ring buffer still has items: the 2nd vector becomes the new 1st vector.
12082 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
12085 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
12086 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
12087 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
12088 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
12090 ++m_1stNullItemsBeginCount;
12091 --m_1stNullItemsMiddleCount;
12093 m_2ndNullItemsCount = 0;
// Swap which vector is considered "1st".
12094 m_1stVectorIndex ^= 1;
12099 VMA_HEAVY_ASSERT(Validate());
// Constructor: zero-initializes counters and the per-level free lists.
// NOTE(review): this excerpt elides lines (embedded numbering jumps), so some
// member initializers are not visible here.
12106 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
12107 VmaBlockMetadata(hAllocator),
12109 m_AllocationCount(0),
// Every level's free list starts empty.
12113 memset(m_FreeList, 0,
sizeof(m_FreeList));
12116 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
12118 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of the given size.
// Usable size is rounded down to a power of two; sizes beyond it are wasted.
// NOTE(review): lines are elided here (embedded numbering jumps); the
// m_LevelCount initialization/increment and m_Root assignment are not visible.
12121 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
12123 VmaBlockMetadata::Init(size);
12125 m_UsableSize = VmaPrevPow2(size);
12126 m_SumFreeSize = m_UsableSize;
// Determine how many levels fit before nodes get smaller than MIN_NODE_SIZE.
12130 while(m_LevelCount < MAX_LEVELS &&
12131 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Create the root node covering the whole usable range as one free node.
12136 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
12137 rootNode->offset = 0;
12138 rootNode->type = Node::TYPE_FREE;
12139 rootNode->parent = VMA_NULL;
12140 rootNode->buddy = VMA_NULL;
12143 AddToFreeListFront(0, rootNode);
// Validates the whole buddy tree and the per-level free lists:
// tree structure, accumulated counters, and doubly-linked list integrity.
// NOTE(review): this excerpt elides lines; loop conditions and the final
// return are not visible here.
12146 bool VmaBlockMetadata_Buddy::Validate()
const
// Validate the tree recursively, accumulating counts into ctx.
12149 ValidationContext ctx;
12150 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
12152 VMA_VALIDATE(
false &&
"ValidateNode failed.");
12154 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
12155 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Validate free lists for levels in use.
12158 for(uint32_t level = 0; level < m_LevelCount; ++level)
12160 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
12161 m_FreeList[level].front->free.prev == VMA_NULL);
12163 for(Node* node = m_FreeList[level].front;
12165 node = node->free.next)
12167 VMA_VALIDATE(node->type == Node::TYPE_FREE);
12169 if(node->free.next == VMA_NULL)
12171 VMA_VALIDATE(m_FreeList[level].back == node);
12175 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must have empty free lists.
12181 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
12183 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
12189 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const
12191 for(uint32_t level = 0; level < m_LevelCount; ++level)
12193 if(m_FreeList[level].front != VMA_NULL)
12195 return LevelToNodeSize(level);
// Fills outInfo with allocation statistics by walking the buddy tree; the
// rounded-off tail (GetUnusableSize) is accounted for separately.
// NOTE(review): lines are elided here; the outInfo initialization and the
// unusable-size accounting statements are not visible in this excerpt.
12201 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
12203 const VkDeviceSize unusableSize = GetUnusableSize();
// Recursive accumulation over the whole tree.
12214 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
12216 if(unusableSize > 0)
// Adds this block's totals into the running pool statistics. The unusable
// tail (size rounded off to the previous power of two) counts as unused.
// NOTE(review): lines are elided here; additional inoutStats updates are not
// visible in this excerpt.
12225 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const
12227 const VkDeviceSize unusableSize = GetUnusableSize();
12229 inoutStats.
size += GetSize();
12230 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
12235 if(unusableSize > 0)
12242 #if VMA_STATS_STRING_ENABLED
// Writes a JSON description of this block: summary statistics, then every
// node of the buddy tree, then the unusable tail as an unused range.
// NOTE(review): lines are elided here; the stat declaration and several call
// arguments are not visible in this excerpt.
12244 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const
12248 CalcAllocationStatInfo(stat);
12250 PrintDetailedMap_Begin(
// Recursively print every node.
12256 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
12258 const VkDeviceSize unusableSize = GetUnusableSize();
12259 if(unusableSize > 0)
12261 PrintDetailedMap_UnusedRange(json,
12266 PrintDetailedMap_End(json);
// Finds a free node of sufficient size and suitable alignment for a new
// allocation. Scans free lists from the target level upward (larger nodes).
// NOTE(review): lines are elided here (embedded numbering jumps); the
// returns and some parameters are not visible in this excerpt.
12271 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
12272 uint32_t currentFrameIndex,
12273 uint32_t frameInUseCount,
12274 VkDeviceSize bufferImageGranularity,
12275 VkDeviceSize allocSize,
12276 VkDeviceSize allocAlignment,
12278 VmaSuballocationType allocType,
12279 bool canMakeOtherLost,
12281 VmaAllocationRequest* pAllocationRequest)
12283 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservatively bump alignment and size to bufferImageGranularity for
// allocation types whose neighbors' types are unknown or mixed.
12287 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
12288 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
12289 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
12291 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
12292 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
// Larger than the whole usable block — cannot fit.
12295 if(allocSize > m_UsableSize)
// Walk levels from the best-fit level up to level 0 (larger nodes).
12300 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
12301 for(uint32_t level = targetLevel + 1; level--; )
12303 for(Node* freeNode = m_FreeList[level].front;
12304 freeNode != VMA_NULL;
12305 freeNode = freeNode->free.next)
// Node offsets are naturally aligned to their size; check requested alignment.
12307 if(freeNode->offset % allocAlignment == 0)
12309 pAllocationRequest->type = VmaAllocationRequestType::Normal;
12310 pAllocationRequest->offset = freeNode->offset;
12311 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
12312 pAllocationRequest->sumItemSize = 0;
12313 pAllocationRequest->itemsToMakeLostCount = 0;
// The chosen level is smuggled to Alloc() through customData.
12314 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm does not support lost allocations: succeeds only when
// the request needs no allocations to become lost.
12323 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
12324 uint32_t currentFrameIndex,
12325 uint32_t frameInUseCount,
12326 VmaAllocationRequest* pAllocationRequest)
12332 return pAllocationRequest->itemsToMakeLostCount == 0;
// NOTE(review): the body of this function is elided from this excerpt
// (original lines 12336-12342 are missing). Presumably it is a no-op because
// the buddy algorithm does not support lost allocations — confirm upstream.
12335 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation request: finds the free node chosen by
// CreateAllocationRequest (level passed via customData), splits it down to
// the target level, and marks the final node as an allocation.
// NOTE(review): lines are elided here; loop-advance statements (e.g. the
// level increment in the split loop) are not visible in this excerpt.
12344 void VmaBlockMetadata_Buddy::Alloc(
12345 const VmaAllocationRequest& request,
12346 VmaSuballocationType type,
12347 VkDeviceSize allocSize,
12350 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
12352 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
12353 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Locate the free node with the requested offset on that level's free list.
12355 Node* currNode = m_FreeList[currLevel].front;
12356 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
12357 while(currNode->offset != request.offset)
12359 currNode = currNode->free.next;
12360 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split the node repeatedly until it is exactly the target level's size.
12364 while(currLevel < targetLevel)
12368 RemoveFromFreeList(currLevel, currNode);
12370 const uint32_t childrenLevel = currLevel + 1;
// Create the two buddies covering the halves of the current node.
12373 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
12374 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
12376 leftChild->offset = currNode->offset;
12377 leftChild->type = Node::TYPE_FREE;
12378 leftChild->parent = currNode;
12379 leftChild->buddy = rightChild;
12381 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
12382 rightChild->type = Node::TYPE_FREE;
12383 rightChild->parent = currNode;
12384 rightChild->buddy = leftChild;
// The current node becomes an interior (split) node.
12387 currNode->type = Node::TYPE_SPLIT;
12388 currNode->split.leftChild = leftChild;
// Left child is pushed last so it ends up at the front (allocated first).
12391 AddToFreeListFront(childrenLevel, rightChild);
12392 AddToFreeListFront(childrenLevel, leftChild);
12397 currNode = m_FreeList[currLevel].front;
// The node now matches the target level; claim it.
12406 VMA_ASSERT(currLevel == targetLevel &&
12407 currNode != VMA_NULL &&
12408 currNode->type == Node::TYPE_FREE);
12409 RemoveFromFreeList(currLevel, currNode);
12412 currNode->type = Node::TYPE_ALLOCATION;
12413 currNode->allocation.alloc = hAllocation;
12415 ++m_AllocationCount;
12417 m_SumFreeSize -= allocSize;
12420 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
12422 if(node->type == Node::TYPE_SPLIT)
12424 DeleteNode(node->split.leftChild->buddy);
12425 DeleteNode(node->split.leftChild);
12428 vma_delete(GetAllocationCallbacks(), node);
// Recursively validates one node of the buddy tree: parent/buddy links,
// offsets, and per-type invariants, accumulating counts into ctx.
// NOTE(review): lines are elided here; switch/case breaks and the final
// return are not visible in this excerpt.
12431 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const
12433 VMA_VALIDATE(level < m_LevelCount);
12434 VMA_VALIDATE(curr->parent == parent);
// Only the root has no buddy; buddies must point at each other.
12435 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
12436 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
12439 case Node::TYPE_FREE:
// A free node contributes its whole level size as free space.
12441 ctx.calculatedSumFreeSize += levelNodeSize;
12442 ++ctx.calculatedFreeCount;
12444 case Node::TYPE_ALLOCATION:
12445 ++ctx.calculatedAllocationCount;
// The slack between node size and allocation size counts as free.
12446 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
12447 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
12449 case Node::TYPE_SPLIT:
12451 const uint32_t childrenLevel = level + 1;
12452 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
// Left child covers the first half of this node's range.
12453 const Node*
const leftChild = curr->split.leftChild;
12454 VMA_VALIDATE(leftChild != VMA_NULL);
12455 VMA_VALIDATE(leftChild->offset == curr->offset);
12456 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
12458 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
// Right child is the left child's buddy and covers the second half.
12460 const Node*
const rightChild = leftChild->buddy;
12461 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
12462 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
12464 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
12475 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const
12478 uint32_t level = 0;
12479 VkDeviceSize currLevelNodeSize = m_UsableSize;
12480 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
12481 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
12484 currLevelNodeSize = nextLevelNodeSize;
12485 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at the given offset: walks the tree from the root to
// the owning leaf, marks it free, then merges free buddy pairs upward.
// NOTE(review): lines are elided here; the level increment in the descend
// loop and the node/level updates in the merge loop are not visible.
12490 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
// Descend from the root, choosing the child whose range contains offset.
12493 Node* node = m_Root;
12494 VkDeviceSize nodeOffset = 0;
12495 uint32_t level = 0;
12496 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
12497 while(node->type == Node::TYPE_SPLIT)
12499 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
12500 if(offset < nodeOffset + nextLevelSize)
12502 node = node->split.leftChild;
12506 node = node->split.leftChild->buddy;
12507 nodeOffset += nextLevelSize;
12510 levelNodeSize = nextLevelSize;
12513 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
12514 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
12517 --m_AllocationCount;
12518 m_SumFreeSize += alloc->GetSize();
12520 node->type = Node::TYPE_FREE;
// Merge with the buddy while it is also free, collapsing split parents.
12523 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
12525 RemoveFromFreeList(level, node->buddy);
12526 Node*
const parent = node->parent;
12528 vma_delete(GetAllocationCallbacks(), node->buddy);
12529 vma_delete(GetAllocationCallbacks(), node);
12530 parent->type = Node::TYPE_FREE;
// Re-insert the final (possibly merged) free node.
12538 AddToFreeListFront(level, node);
// Recursively accumulates statistics for one node of the buddy tree into
// outInfo: free nodes as unused ranges, allocation nodes (plus their slack),
// and split nodes via their two children.
// NOTE(review): lines are elided here; the actual outInfo field updates and
// switch/case breaks are not visible in this excerpt.
12541 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const
12545 case Node::TYPE_FREE:
12551 case Node::TYPE_ALLOCATION:
12553 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// The difference between node size and allocation size is unused slack.
12559 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
12560 if(unusedRangeSize > 0)
12569 case Node::TYPE_SPLIT:
12571 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
12572 const Node*
const leftChild = node->split.leftChild;
12573 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
12574 const Node*
const rightChild = leftChild->buddy;
12575 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
12583 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
12585 VMA_ASSERT(node->type == Node::TYPE_FREE);
12588 Node*
const frontNode = m_FreeList[level].front;
12589 if(frontNode == VMA_NULL)
12591 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
12592 node->free.prev = node->free.next = VMA_NULL;
12593 m_FreeList[level].front = m_FreeList[level].back = node;
12597 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
12598 node->free.prev = VMA_NULL;
12599 node->free.next = frontNode;
12600 frontNode->free.prev = node;
12601 m_FreeList[level].front = node;
12605 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
12607 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
12610 if(node->free.prev == VMA_NULL)
12612 VMA_ASSERT(m_FreeList[level].front == node);
12613 m_FreeList[level].front = node->free.next;
12617 Node*
const prevFreeNode = node->free.prev;
12618 VMA_ASSERT(prevFreeNode->free.next == node);
12619 prevFreeNode->free.next = node->free.next;
12623 if(node->free.next == VMA_NULL)
12625 VMA_ASSERT(m_FreeList[level].back == node);
12626 m_FreeList[level].back = node->free.prev;
12630 Node*
const nextFreeNode = node->free.next;
12631 VMA_ASSERT(nextFreeNode->free.prev == node);
12632 nextFreeNode->free.prev = node->free.prev;
12636 #if VMA_STATS_STRING_ENABLED
// Recursively writes one node of the buddy tree to the JSON writer: free
// nodes as unused ranges, allocations (plus slack), splits via children.
// NOTE(review): lines are elided here; switch/case breaks are not visible.
12637 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const
12641 case Node::TYPE_FREE:
12642 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
12644 case Node::TYPE_ALLOCATION:
12646 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
12647 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report the slack after the allocation as an unused range.
12648 if(allocSize < levelNodeSize)
12650 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
12654 case Node::TYPE_SPLIT:
12656 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
12657 const Node*
const leftChild = node->split.leftChild;
12658 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
12659 const Node*
const rightChild = leftChild->buddy;
12660 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: leaves the block in an uninitialized state (no VkDeviceMemory,
// no metadata). Real setup happens in Init().
12673 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
12674 m_pMetadata(VMA_NULL),
12675 m_MemoryTypeIndex(UINT32_MAX),
12677 m_hMemory(VK_NULL_HANDLE),
12679 m_pMappedData(VMA_NULL)
// Takes ownership of an already-allocated VkDeviceMemory and creates the
// suballocation metadata object matching the requested algorithm
// (linear / buddy / generic). Must be called exactly once per block.
12683 void VmaDeviceMemoryBlock::Init(
12686 uint32_t newMemoryTypeIndex,
12687 VkDeviceMemory newMemory,
12688 VkDeviceSize newSize,
12690 uint32_t algorithm)
12692 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
12694 m_hParentPool = hParentPool;
12695 m_MemoryTypeIndex = newMemoryTypeIndex;
12697 m_hMemory = newMemory;
12702 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
12705 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
// Default (algorithm == 0) path: generic free-list metadata.
12711 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
12713 m_pMetadata->Init(newSize);
// Releases the VkDeviceMemory back to Vulkan and deletes the metadata.
// Requires that all suballocations have already been freed.
12716 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
12720 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
12722 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
12723 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
12724 m_hMemory = VK_NULL_HANDLE;
12726 vma_delete(allocator, m_pMetadata);
12727 m_pMetadata = VMA_NULL;
// Sanity check: the block owns device memory of non-zero size, then delegates
// the detailed consistency check to the metadata object.
12730 bool VmaDeviceMemoryBlock::Validate()
const
12732 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
12733 (m_pMetadata->GetSize() != 0));
12735 return m_pMetadata->Validate();
// Temporarily maps the whole block and asks the metadata to verify the magic
// values written around allocations. Map/Unmap are reference-counted (count 1).
12738 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
12740 void* pData =
nullptr;
12741 VkResult res = Map(hAllocator, 1, &pData);
12742 if(res != VK_SUCCESS)
12747 res = m_pMetadata->CheckCorruption(pData);
12749 Unmap(hAllocator, 1);
// Reference-counted mapping of the whole block. If already mapped, just bumps
// the count and returns the cached pointer; otherwise calls vkMapMemory under
// the block's mutex. `ppData` may be null when the caller only needs the
// mapping to exist.
12754 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
12761 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12762 if(m_MapCount != 0)
12764 m_MapCount += count;
12765 VMA_ASSERT(m_pMappedData != VMA_NULL);
12766 if(ppData != VMA_NULL)
12768 *ppData = m_pMappedData;
// First mapping: call into Vulkan. NOTE(review): the argument lines of this
// vkMapMemory call are missing from this listing — verify against upstream.
12774 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
12775 hAllocator->m_hDevice,
12781 if(result == VK_SUCCESS)
12783 if(ppData != VMA_NULL)
12785 *ppData = m_pMappedData;
12787 m_MapCount = count;
// Decrements the map reference count by `count`; calls vkUnmapMemory only when
// it reaches zero. Asserts on unbalanced unmap.
12793 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
12800 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12801 if(m_MapCount >= count)
12803 m_MapCount -= count;
12804 if(m_MapCount == 0)
12806 m_pMappedData = VMA_NULL;
12807 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
12812 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Corruption-detection helper: writes the magic marker into the debug margins
// immediately before and after an allocation. Requires VMA_DEBUG_MARGIN > 0
// and corruption detection enabled at compile time.
12816 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12818 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12819 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12822 VkResult res = Map(hAllocator, 1, &pData);
12823 if(res != VK_SUCCESS)
12828 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
12829 VmaWriteMagicValue(pData, allocOffset + allocSize);
12831 Unmap(hAllocator, 1);
// Counterpart of WriteMagicValueAroundAllocation: re-reads the margins around
// an allocation being freed and asserts loudly if either magic value was
// overwritten (i.e. a buffer under/overrun occurred).
12836 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12838 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12839 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12842 VkResult res = Map(hAllocator, 1, &pData);
12843 if(res != VK_SUCCESS)
12848 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
12850 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
12852 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
12854 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
12857 Unmap(hAllocator, 1);
// Binds a VkBuffer to this block's memory at the allocation's offset plus a
// caller-provided offset relative to the allocation start. Serialized on the
// block mutex because vkBindBufferMemory on the same VkDeviceMemory must not
// race with map/unmap of this block.
12862 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
12865 VkDeviceSize allocationLocalOffset,
12869 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12870 hAllocation->GetBlock() ==
this);
12871 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12872 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12873 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12875 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12876 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
// Image analogue of BindBufferMemory: binds a VkImage at the allocation's
// offset plus `allocationLocalOffset`, under the block mutex.
12879 VkResult VmaDeviceMemoryBlock::BindImageMemory(
12882 VkDeviceSize allocationLocalOffset,
12886 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12887 hAllocation->GetBlock() ==
this);
12888 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12889 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12890 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12892 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12893 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
12898 memset(&outInfo, 0,
sizeof(outInfo));
12917 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the create-info into the embedded block vector.
// blockSize == 0 means "let the allocator pick", hence the fallback to
// preferredBlockSize and explicitBlockSize = (blockSize != 0).
// NOTE(review): parts of the member-initializer list are missing from this
// listing — verify against upstream VMA source.
12925 VmaPool_T::VmaPool_T(
12928 VkDeviceSize preferredBlockSize) :
12932 createInfo.memoryTypeIndex,
12933 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
12934 createInfo.minBlockCount,
12935 createInfo.maxBlockCount,
12937 createInfo.frameInUseCount,
12938 createInfo.blockSize != 0,
12940 createInfo.priority,
// Effective minimum alignment: the stricter of the memory type's requirement
// and the user-requested minAllocationAlignment.
12941 VMA_MAX(hAllocator->GetMemoryTypeMinAlignment(createInfo.memoryTypeIndex), createInfo.minAllocationAlignment),
12942 createInfo.pMemoryAllocateNext),
// Destructor: by this point the pool must already be unlinked from the
// allocator's intrusive pool list.
12948 VmaPool_T::~VmaPool_T()
12950 VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL);
// Replaces the pool's debug name: frees the old copy, then duplicates the new
// string (null clears the name) using the allocator's callbacks.
12953 void VmaPool_T::SetName(
const char* pName)
12955 const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
12956 VmaFreeString(allocs, m_Name);
12958 if(pName != VMA_NULL)
12960 m_Name = VmaCreateStringCopy(allocs, pName);
12968 #if VMA_STATS_STRING_ENABLED
// Block-vector constructor: stores configuration (memory type, block size
// policy, min/max block counts, algorithm, priority, alignment, extra pNext
// chain for allocations) and creates an empty block list. No Vulkan calls.
12972 VmaBlockVector::VmaBlockVector(
12975 uint32_t memoryTypeIndex,
12976 VkDeviceSize preferredBlockSize,
12977 size_t minBlockCount,
12978 size_t maxBlockCount,
12979 VkDeviceSize bufferImageGranularity,
12980 uint32_t frameInUseCount,
12981 bool explicitBlockSize,
12982 uint32_t algorithm,
12984 VkDeviceSize minAllocationAlignment,
12985 void* pMemoryAllocateNext) :
12986 m_hAllocator(hAllocator),
12987 m_hParentPool(hParentPool),
12988 m_MemoryTypeIndex(memoryTypeIndex),
12989 m_PreferredBlockSize(preferredBlockSize),
12990 m_MinBlockCount(minBlockCount),
12991 m_MaxBlockCount(maxBlockCount),
12992 m_BufferImageGranularity(bufferImageGranularity),
12993 m_FrameInUseCount(frameInUseCount),
12994 m_ExplicitBlockSize(explicitBlockSize),
12995 m_Algorithm(algorithm),
12996 m_Priority(priority),
12997 m_MinAllocationAlignment(minAllocationAlignment),
12998 m_pMemoryAllocateNext(pMemoryAllocateNext),
12999 m_HasEmptyBlock(false),
13000 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
// Destructor: destroys and deletes every remaining block (reverse order).
13005 VmaBlockVector::~VmaBlockVector()
13007 for(
size_t i = m_Blocks.size(); i--; )
13009 m_Blocks[i]->Destroy(m_hAllocator);
13010 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-allocates m_MinBlockCount blocks of the preferred size (used for pools
// with a minimum block count). Stops at the first failure.
13014 VkResult VmaBlockVector::CreateMinBlocks()
13016 for(
size_t i = 0; i < m_MinBlockCount; ++i)
13018 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
13019 if(res != VK_SUCCESS)
// Accumulates pool statistics across all blocks under a read lock.
13027 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
13029 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13031 const size_t blockCount = m_Blocks.size();
13040 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13042 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
13043 VMA_ASSERT(pBlock);
13044 VMA_HEAVY_ASSERT(pBlock->Validate());
13045 pBlock->m_pMetadata->AddPoolStats(*pStats);
// True when the vector currently owns no memory blocks (read-locked).
13049 bool VmaBlockVector::IsEmpty()
13051 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13052 return m_Blocks.empty();
// Corruption detection is active only when compiled in (margin + detect flags)
// AND this memory type is HOST_VISIBLE | HOST_COHERENT, since the magic values
// are written/read through a CPU mapping without explicit flushes.
13055 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const
13057 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13058 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
13059 (VMA_DEBUG_MARGIN > 0) &&
13061 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retry attempts in AllocatePage's make-lost loop.
13064 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates `allocationCount` pages, all-or-nothing: on any failure, every
// page allocated so far is rolled back (freed + budget restored) and the
// output array is zeroed.
13066 VkResult VmaBlockVector::Allocate(
13067 uint32_t currentFrameIndex,
13069 VkDeviceSize alignment,
13071 VmaSuballocationType suballocType,
13072 size_t allocationCount,
13076 VkResult res = VK_SUCCESS;
13078 alignment = VMA_MAX(alignment, m_MinAllocationAlignment);
// With corruption detection on, size and alignment are rounded up so the
// magic-value margins stay aligned to the magic value's size.
13080 if(IsCorruptionDetectionEnabled())
13082 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
13083 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
13087 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13088 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13090 res = AllocatePage(
13096 pAllocations + allocIndex);
13097 if(res != VK_SUCCESS)
// Rollback path: undo the pages that did succeed.
13104 if(res != VK_SUCCESS)
13107 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13108 while(allocIndex--)
13110 VmaAllocation_T*
const alloc = pAllocations[allocIndex];
13111 const VkDeviceSize allocSize = alloc->GetSize();
13113 m_hAllocator->m_Budget.RemoveAllocation(heapIndex, allocSize);
13115 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Core single-page allocation strategy. In order:
//   1. Early rejections (upper-address on unsupported algorithm, request
//      larger than the block size can ever hold).
//   2. Try existing blocks (last block first, then forward or backward scan
//      depending on allocation strategy).
//   3. Create a new block, shrinking the candidate size by halves (up to
//      NEW_BLOCK_SIZE_SHIFT_MAX times) to respect budget / existing sizes.
//   4. If allowed, retry up to VMA_ALLOCATION_TRY_COUNT times making other
//      (lost-able) allocations lost to carve out space.
// NOTE(review): many argument lines and braces are absent from this listing
// (original line numbers jump) — consult upstream VMA for the full body.
13121 VkResult VmaBlockVector::AllocatePage(
13122 uint32_t currentFrameIndex,
13124 VkDeviceSize alignment,
13126 VmaSuballocationType suballocType,
13134 VkDeviceSize freeMemory;
13136 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13138 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
// Custom pools cannot fall back to a dedicated allocation.
13142 const bool canFallbackToDedicated = !IsCustomPool();
13143 const bool canCreateNewBlock =
13145 (m_Blocks.size() < m_MaxBlockCount) &&
13146 (freeMemory >= size || !canFallbackToDedicated);
13153 canMakeOtherLost =
false;
13157 if(isUpperAddress &&
13160 return VK_ERROR_FEATURE_NOT_PRESENT;
13174 return VK_ERROR_FEATURE_NOT_PRESENT;
// Request (plus both debug margins) can never fit in a block of preferred size.
13178 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
13180 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13188 if(!canMakeOtherLost || canCreateNewBlock)
// Fast path: the most recently used (last) block.
13197 if(!m_Blocks.empty())
13199 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
13200 VMA_ASSERT(pCurrBlock);
13201 VkResult res = AllocateFromBlock(
13211 if(res == VK_SUCCESS)
13213 VMA_DEBUG_LOG(
" Returned from last block #%u", pCurrBlock->GetId());
// Forward scan over all existing blocks.
13223 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13225 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13226 VMA_ASSERT(pCurrBlock);
13227 VkResult res = AllocateFromBlock(
13237 if(res == VK_SUCCESS)
13239 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
// Backward scan (alternate strategy branch).
13247 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13249 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13250 VMA_ASSERT(pCurrBlock);
13251 VkResult res = AllocateFromBlock(
13261 if(res == VK_SUCCESS)
13263 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
13271 if(canCreateNewBlock)
13274 VkDeviceSize newBlockSize = m_PreferredBlockSize;
13275 uint32_t newBlockSizeShift = 0;
13276 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic pre-shrink: start smaller than preferred if no existing block is
// that large and the request would still fit with 2x headroom.
13278 if(!m_ExplicitBlockSize)
13281 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
13282 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
13284 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
13285 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
13287 newBlockSize = smallerNewBlockSize;
13288 ++newBlockSizeShift;
13297 size_t newBlockIndex = 0;
13298 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
13299 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
// On failure, keep halving the block size while it still fits the request.
13301 if(!m_ExplicitBlockSize)
13303 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
13305 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
13306 if(smallerNewBlockSize >= size)
13308 newBlockSize = smallerNewBlockSize;
13309 ++newBlockSizeShift;
13310 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
13311 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
13320 if(res == VK_SUCCESS)
13322 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
13323 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
13325 res = AllocateFromBlock(
13335 if(res == VK_SUCCESS)
13337 VMA_DEBUG_LOG(
" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
13343 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Last resort: repeatedly try to find a block where enough lost-able
// allocations can be sacrificed to satisfy the request.
13350 if(canMakeOtherLost)
13352 uint32_t tryIndex = 0;
13353 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
13355 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
13356 VmaAllocationRequest bestRequest = {};
13357 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
13363 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13365 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13366 VMA_ASSERT(pCurrBlock);
13367 VmaAllocationRequest currRequest = {};
13368 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13371 m_BufferImageGranularity,
// Cost = bytes of other allocations that would be lost; keep the cheapest.
13380 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13381 if(pBestRequestBlock == VMA_NULL ||
13382 currRequestCost < bestRequestCost)
13384 pBestRequestBlock = pCurrBlock;
13385 bestRequest = currRequest;
13386 bestRequestCost = currRequestCost;
13388 if(bestRequestCost == 0)
13399 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13401 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13402 VMA_ASSERT(pCurrBlock);
13403 VmaAllocationRequest currRequest = {};
13404 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13407 m_BufferImageGranularity,
13416 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13417 if(pBestRequestBlock == VMA_NULL ||
13418 currRequestCost < bestRequestCost ||
13421 pBestRequestBlock = pCurrBlock;
13422 bestRequest = currRequest;
13423 bestRequestCost = currRequestCost;
13425 if(bestRequestCost == 0 ||
13435 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocations require the block mapped up front.
13439 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
13440 if(res != VK_SUCCESS)
13446 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
13452 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13453 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
13454 UpdateHasEmptyBlock();
13455 (*pAllocation)->InitBlockAllocation(
13457 bestRequest.offset,
13464 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
13465 VMA_DEBUG_LOG(
" Returned from existing block");
13466 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
13467 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13468 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13470 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
13472 if(IsCorruptionDetectionEnabled())
13474 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
13475 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.")
13490 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
13492 return VK_ERROR_TOO_MANY_OBJECTS;
13496 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation back into its block. Validates debug margins first,
// drops any persistent mapping, then — outside the lock at the end — destroys
// a block if it became empty and retention policy allows (keep at most one
// empty block, unless over budget).
13499 void VmaBlockVector::Free(
13502 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
13504 bool budgetExceeded =
false;
13506 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13508 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
13509 budgetExceeded = heapBudget.
usage >= heapBudget.
budget;
// Scope of the write lock; the actual VkDeviceMemory free happens after it.
13514 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13516 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13518 if(IsCorruptionDetectionEnabled())
13520 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
13521 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
13524 if(hAllocation->IsPersistentMap())
13526 pBlock->Unmap(m_hAllocator, 1);
13529 pBlock->m_pMetadata->Free(hAllocation);
13530 VMA_HEAVY_ASSERT(pBlock->Validate());
13532 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
13534 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
// This block became empty: delete it only if another empty block already
// exists (or budget is exceeded) — otherwise keep one empty block cached.
13536 if(pBlock->m_pMetadata->IsEmpty())
13539 if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
13541 pBlockToDelete = pBlock;
// Block not empty, but there is an empty one at the back: drop that instead.
13548 else if(m_HasEmptyBlock && canDeleteBlock)
13550 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
13551 if(pLastBlock->m_pMetadata->IsEmpty())
13553 pBlockToDelete = pLastBlock;
13554 m_Blocks.pop_back();
13558 UpdateHasEmptyBlock();
13559 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is deliberately done outside the mutex.
13564 if(pBlockToDelete != VMA_NULL)
13566 VMA_DEBUG_LOG(
" Deleted empty block");
13567 pBlockToDelete->Destroy(m_hAllocator);
13568 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the largest existing block size; early-outs once it reaches the
// preferred block size, since no block can usefully exceed that answer.
13572 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const
13574 VkDeviceSize result = 0;
13575 for(
size_t i = m_Blocks.size(); i--; )
13577 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
13578 if(result >= m_PreferredBlockSize)
// Removes the given block pointer from m_Blocks (linear search; does not
// destroy the block).
13586 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
13588 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13590 if(m_Blocks[blockIndex] == pBlock)
13592 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass ordering blocks by ascending free space, so blocks
// with less free space are tried first by forward allocation scans.
13599 void VmaBlockVector::IncrementallySortBlocks()
13604 for(
size_t i = 1; i < m_Blocks.size(); ++i)
13606 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
13608 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i])
// Attempts the allocation inside one specific block: builds an allocation
// request via the metadata, maps the block if the allocation is persistently
// mapped, commits the request, updates budget, and optionally fills / writes
// corruption-detection margins. Returns VK_ERROR_OUT_OF_DEVICE_MEMORY if the
// block has no suitable space.
13615 VkResult VmaBlockVector::AllocateFromBlock(
13616 VmaDeviceMemoryBlock* pBlock,
13617 uint32_t currentFrameIndex,
13619 VkDeviceSize alignment,
13622 VmaSuballocationType suballocType,
13631 VmaAllocationRequest currRequest = {};
13632 if(pBlock->m_pMetadata->CreateAllocationRequest(
13635 m_BufferImageGranularity,
// This path never sacrifices other allocations.
13645 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
13649 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
13650 if(res != VK_SUCCESS)
13656 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13657 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
13658 UpdateHasEmptyBlock();
13659 (*pAllocation)->InitBlockAllocation(
13661 currRequest.offset,
13668 VMA_HEAVY_ASSERT(pBlock->Validate());
13669 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
13670 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13671 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13673 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
13675 if(IsCorruptionDetectionEnabled())
13677 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
13678 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
13682 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of `blockSize`, chaining optional extension
// structs (device-address flags, memory priority, export-memory handle types)
// onto VkMemoryAllocateInfo, then wraps it in a VmaDeviceMemoryBlock appended
// to m_Blocks. On success, optionally returns the new block's index.
13685 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
13687 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
13688 allocInfo.pNext = m_pMemoryAllocateNext;
13689 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
13690 allocInfo.allocationSize = blockSize;
13692 #if VMA_BUFFER_DEVICE_ADDRESS
// Blocks must carry DEVICE_ADDRESS so buffers bound to them can use
// VK_KHR_buffer_device_address.
13694 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
13695 if(m_hAllocator->m_UseKhrBufferDeviceAddress)
13697 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
13698 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
13702 #if VMA_MEMORY_PRIORITY
13703 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
13704 if(m_hAllocator->m_UseExtMemoryPriority)
13706 priorityInfo.priority = m_Priority;
13707 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
13711 #if VMA_EXTERNAL_MEMORY
13713 VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR };
13714 exportMemoryAllocInfo.handleTypes = m_hAllocator->GetExternalMemoryHandleTypeFlags(m_MemoryTypeIndex);
13715 if(exportMemoryAllocInfo.handleTypes != 0)
13717 VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo);
13721 VkDeviceMemory mem = VK_NULL_HANDLE;
13722 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
13731 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
13737 allocInfo.allocationSize,
13741 m_Blocks.push_back(pBlock);
13742 if(pNewBlockIndex != VMA_NULL)
13744 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU via memcpy through host mappings:
//   1. Mark which blocks participate in any move.
//   2. Map every participating block (remember which were mapped only for
//      this pass).
//   3. For each move: invalidate src range (non-coherent memory), memcpy,
//      rewrite corruption-detection margins, flush dst range.
//   4. Unmap blocks mapped in step 2 (reverse order).
13750 void VmaBlockVector::ApplyDefragmentationMovesCpu(
13751 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13752 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
13754 const size_t blockCount = m_Blocks.size();
13755 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
13759 BLOCK_FLAG_USED = 0x00000001,
13760 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
13768 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
13769 blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
13770 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
13773 const size_t moveCount = moves.size();
13774 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13776 const VmaDefragmentationMove& move = moves[moveIndex];
13777 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
13778 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
13781 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Map participating blocks; reuse an existing mapping when present.
13784 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13786 BlockInfo& currBlockInfo = blockInfo[blockIndex];
13787 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13788 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
13790 currBlockInfo.pMappedData = pBlock->GetMappedData();
13792 if(currBlockInfo.pMappedData == VMA_NULL)
13794 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
13795 if(pDefragCtx->res == VK_SUCCESS)
13797 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
13804 if(pDefragCtx->res == VK_SUCCESS)
13806 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13807 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13809 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13811 const VmaDefragmentationMove& move = moves[moveIndex];
13813 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
13814 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
13816 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Non-coherent: invalidate the (atom-aligned, clamped) source range before
// reading it through the mapping.
13821 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
13822 memRange.memory = pSrcBlock->GetDeviceMemory();
13823 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
13824 memRange.size = VMA_MIN(
13825 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
13826 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
13827 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
13832 reinterpret_cast<char*
>(dstBlockInfo.pMappedData) + move.dstOffset,
13833 reinterpret_cast<char*
>(srcBlockInfo.pMappedData) + move.srcOffset,
13834 static_cast<size_t>(move.size));
13836 if(IsCorruptionDetectionEnabled())
13838 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
13839 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Non-coherent: flush the destination range after writing.
13845 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
13846 memRange.memory = pDstBlock->GetDeviceMemory();
13847 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
13848 memRange.size = VMA_MIN(
13849 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
13850 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
13851 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Unmap only blocks this function itself mapped.
13858 for(
size_t blockIndex = blockCount; blockIndex--; )
13860 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
13861 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
13863 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13864 pBlock->Unmap(m_hAllocator, 1);
// Records defragmentation moves as GPU copies: creates a temporary VkBuffer
// spanning each participating block, binds it to the block's memory, then
// records vkCmdCopyBuffer for every move into `commandBuffer`. Result is
// VK_NOT_READY because the copies complete only after command-buffer
// execution; the temporary buffers are destroyed in DefragmentationEnd.
13869 void VmaBlockVector::ApplyDefragmentationMovesGpu(
13870 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13871 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13872 VkCommandBuffer commandBuffer)
13874 const size_t blockCount = m_Blocks.size();
13876 pDefragCtx->blockContexts.resize(blockCount);
13877 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
13880 const size_t moveCount = moves.size();
13881 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13883 const VmaDefragmentationMove& move = moves[moveIndex];
13888 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13889 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13893 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
13897 VkBufferCreateInfo bufCreateInfo;
13898 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
// One whole-block transfer buffer per participating block.
13900 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13902 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
13903 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13904 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
13906 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
13907 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
13908 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
13909 if(pDefragCtx->res == VK_SUCCESS)
13911 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
13912 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
13919 if(pDefragCtx->res == VK_SUCCESS)
13921 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13923 const VmaDefragmentationMove& move = moves[moveIndex];
13925 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
13926 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
13928 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
13930 VkBufferCopy region = {
13934 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
13935 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
13940 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
13942 pDefragCtx->res = VK_NOT_READY;
// NOTE(review): the signature line of this function is missing from this
// listing; by content this is presumably VmaBlockVector::FreeEmptyBlocks —
// it destroys empty blocks above m_MinBlockCount (reverse scan), crediting
// their size to the defragmentation stats, then refreshes m_HasEmptyBlock.
// Verify against upstream VMA source.
13948 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13950 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13951 if(pBlock->m_pMetadata->IsEmpty())
13953 if(m_Blocks.size() > m_MinBlockCount)
13955 if(pDefragmentationStats != VMA_NULL)
13958 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
13961 VmaVectorRemove(m_Blocks, blockIndex);
13962 pBlock->Destroy(m_hAllocator);
13963 vma_delete(m_hAllocator, pBlock);
13971 UpdateHasEmptyBlock();
// Recomputes the cached m_HasEmptyBlock flag by scanning for any block whose
// metadata reports empty.
13974 void VmaBlockVector::UpdateHasEmptyBlock()
13976 m_HasEmptyBlock =
false;
13977 for(
size_t index = 0, count = m_Blocks.size(); index < count; ++index)
13979 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[index];
13980 if(pBlock->m_pMetadata->IsEmpty())
13982 m_HasEmptyBlock =
true;
13988 #if VMA_STATS_STRING_ENABLED
// Serializes this block vector to JSON under a read lock: pool metadata
// (name, memory type, block size/count limits, frame-in-use count, algorithm)
// followed by a "Blocks" object keyed by block id with each block's detailed
// map. NOTE(review): branch headers between the pool and default sections are
// missing from this listing — verify against upstream VMA source.
13990 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
13992 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13994 json.BeginObject();
13998 const char* poolName = m_hParentPool->GetName();
13999 if(poolName != VMA_NULL && poolName[0] !=
'\0')
14001 json.WriteString(
"Name");
14002 json.WriteString(poolName);
14005 json.WriteString(
"MemoryTypeIndex");
14006 json.WriteNumber(m_MemoryTypeIndex);
14008 json.WriteString(
"BlockSize");
14009 json.WriteNumber(m_PreferredBlockSize);
14011 json.WriteString(
"BlockCount");
14012 json.BeginObject(
true);
14013 if(m_MinBlockCount > 0)
14015 json.WriteString(
"Min");
14016 json.WriteNumber((uint64_t)m_MinBlockCount);
14018 if(m_MaxBlockCount < SIZE_MAX)
14020 json.WriteString(
"Max");
14021 json.WriteNumber((uint64_t)m_MaxBlockCount);
14023 json.WriteString(
"Cur");
14024 json.WriteNumber((uint64_t)m_Blocks.size());
14027 if(m_FrameInUseCount > 0)
14029 json.WriteString(
"FrameInUseCount");
14030 json.WriteNumber(m_FrameInUseCount);
14033 if(m_Algorithm != 0)
14035 json.WriteString(
"Algorithm");
14036 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
14041 json.WriteString(
"PreferredBlockSize");
14042 json.WriteNumber(m_PreferredBlockSize);
14045 json.WriteString(
"Blocks");
14046 json.BeginObject();
14047 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14049 json.BeginString();
14050 json.ContinueString(m_Blocks[i]->GetId());
14053 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation round for this vector:
//   - Decides CPU vs GPU path from host visibility, corruption detection,
//     per-memory-type GPU defrag mask, and device-local/integrated heuristic.
//   - Acquires the write mutex (try-lock for incremental mode, else blocking).
//   - Runs the algorithm within byte/allocation budgets, subtracting the work
//     actually performed from the caller's remaining budgets.
//   - Applies the moves via the GPU (command buffer) or CPU (memcpy) path.
14062 void VmaBlockVector::Defragment(
14063 class VmaBlockVectorDefragmentationContext* pCtx,
14065 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
14066 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
14067 VkCommandBuffer commandBuffer)
14069 pCtx->res = VK_SUCCESS;
14071 const VkMemoryPropertyFlags memPropFlags =
14072 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
14073 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
14075 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
// GPU defrag excluded when corruption detection is active (margins are
// maintained through host writes).
14077 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
14078 !IsCorruptionDetectionEnabled() &&
14079 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
14082 if(canDefragmentOnCpu || canDefragmentOnGpu)
14084 bool defragmentOnGpu;
14086 if(canDefragmentOnGpu != canDefragmentOnCpu)
14088 defragmentOnGpu = canDefragmentOnGpu;
14093 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
14094 m_hAllocator->IsIntegratedGpu();
14097 bool overlappingMoveSupported = !defragmentOnGpu;
14099 if(m_hAllocator->m_UseMutex)
// Incremental mode must not block: bail out if the lock is contended.
14103 if(!m_Mutex.TryLockWrite())
14105 pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
14111 m_Mutex.LockWrite();
14112 pCtx->mutexLocked =
true;
14116 pCtx->Begin(overlappingMoveSupported, flags);
14120 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
14121 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
14122 pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);
14125 if(pStats != VMA_NULL)
14127 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
14128 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
14131 VMA_ASSERT(bytesMoved <= maxBytesToMove);
14132 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
14133 if(defragmentOnGpu)
14135 maxGpuBytesToMove -= bytesMoved;
14136 maxGpuAllocationsToMove -= allocationsMoved;
14140 maxCpuBytesToMove -= bytesMoved;
14141 maxCpuAllocationsToMove -= allocationsMoved;
14147 if(m_hAllocator->m_UseMutex)
14148 m_Mutex.UnlockWrite();
14150 if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
14151 pCtx->res = VK_NOT_READY;
14156 if(pCtx->res >= VK_SUCCESS)
14158 if(defragmentOnGpu)
14160 ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
14164 ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
// Finishes a defragmentation round: (re)takes the write lock if needed,
// destroys the temporary per-block transfer buffers created by the GPU path,
// frees blocks that became empty, and releases the mutex if this context
// holds it.
14170 void VmaBlockVector::DefragmentationEnd(
14171 class VmaBlockVectorDefragmentationContext* pCtx,
14177 VMA_ASSERT(pCtx->mutexLocked ==
false);
14181 m_Mutex.LockWrite();
14182 pCtx->mutexLocked =
true;
14186 if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
14189 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
14191 VmaBlockDefragmentationContext &blockCtx = pCtx->blockContexts[blockIndex];
14192 if(blockCtx.hBuffer)
14194 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
14198 if(pCtx->res >= VK_SUCCESS)
14200 FreeEmptyBlocks(pStats);
14204 if(pCtx->mutexLocked)
14206 VMA_ASSERT(m_hAllocator->m_UseMutex);
14207 m_Mutex.UnlockWrite();
// Incremental defragmentation: hands up to `maxMoves` pending moves to the
// caller (filling pMove records with destination memory/offset) and advances
// the processed cursor. Returns the number of moves emitted.
14211 uint32_t VmaBlockVector::ProcessDefragmentations(
14212 class VmaBlockVectorDefragmentationContext *pCtx,
14215 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14217 const uint32_t moveCount = VMA_MIN(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);
14219 for(uint32_t i = 0; i < moveCount; ++ i)
14221 VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];
14224 pMove->
memory = move.pDstBlock->GetDeviceMemory();
14225 pMove->
offset = move.dstOffset;
14230 pCtx->defragmentationMovesProcessed += moveCount;
// Commits every processed-but-uncommitted move: frees the source slot in its
// block's metadata and rebinds the allocation object to its destination
// block/offset, then frees any blocks that became empty.
14235 void VmaBlockVector::CommitDefragmentations(
14236 class VmaBlockVectorDefragmentationContext *pCtx,
14239 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14241 for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
14243 const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];
14245 move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
14246 move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
14249 pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
14250 FreeEmptyBlocks(pStats);
// Sums the allocation counts of all blocks.
14253 size_t VmaBlockVector::CalcAllocationCount()
const
14256 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14258 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Returns whether any block may contain adjacent buffer/image suballocations
// closer than bufferImageGranularity (trivially false when granularity is 1).
// Only valid for the generic algorithm (asserted); `lastSuballocType` carries
// the trailing suballocation type across block boundaries.
14263 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const
14265 if(m_BufferImageGranularity == 1)
14269 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
14270 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
14272 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
14273 VMA_ASSERT(m_Algorithm == 0);
14274 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
14275 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks allocations in all blocks as lost based on the current frame index
// and m_FrameInUseCount; the total number made lost is optionally returned
// through pLostAllocationCount.
14283 void VmaBlockVector::MakePoolAllocationsLost(
14284 uint32_t currentFrameIndex,
14285 size_t* pLostAllocationCount)
14287 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14288 size_t lostAllocationCount = 0;
14289 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14291 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14292 VMA_ASSERT(pBlock);
14293 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Out parameter is optional.
14295 if(pLostAllocationCount != VMA_NULL)
14297 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection margins in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not enabled for
// this vector; otherwise propagates the first non-success per-block result
// (elided return paths not visible here).
14301 VkResult VmaBlockVector::CheckCorruption()
14303 if(!IsCorruptionDetectionEnabled())
14305 return VK_ERROR_FEATURE_NOT_PRESENT;
// Read lock suffices: blocks are only inspected, not mutated.
14308 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
14309 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14311 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14312 VMA_ASSERT(pBlock);
14313 VkResult res = pBlock->CheckCorruption(m_hAllocator);
14314 if(res != VK_SUCCESS)
// Accumulates this vector's per-block statistics into pStats, folding each
// block's stat info into the global total, the per-memory-type entry, and
// the per-memory-heap entry.
14322 void VmaBlockVector::AddStats(
VmaStats* pStats)
14324 const uint32_t memTypeIndex = m_MemoryTypeIndex;
14325 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
14327 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
14329 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14331 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14332 VMA_ASSERT(pBlock);
14333 VMA_HEAVY_ASSERT(pBlock->Validate());
14335 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
14336 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14337 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14338 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: snapshots one BlockInfo per block of the target block vector,
// remembering each block's original index, then sorts the infos by block
// pointer so AddAllocation() can binary-search them (BlockPointerLess).
14345 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
14347 VmaBlockVector* pBlockVector,
14348 uint32_t currentFrameIndex,
14349 bool overlappingMoveSupported) :
14350 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14351 m_AllocationCount(0),
14352 m_AllAllocations(false),
14354 m_AllocationsMoved(0),
14355 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
14358 const size_t blockCount = m_pBlockVector->m_Blocks.size();
14359 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14361 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
14362 pBlockInfo->m_OriginalBlockIndex = blockIndex;
14363 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
14364 m_Blocks.push_back(pBlockInfo);
// Sorted by block pointer to enable binary search in AddAllocation().
14368 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Destructor: releases the BlockInfo objects allocated in the constructor
// (reverse order).
14371 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
14373 for(
size_t i = m_Blocks.size(); i--; )
14375 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a defragmentation candidate. Lost allocations
// are skipped. The owning block is located by binary search over m_Blocks
// (sorted by block pointer in the constructor).
14379 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
14382 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
14384 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
14385 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
14386 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
14388 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
14389 (*it)->m_Allocations.push_back(allocInfo);
// Counted even when the owning block was not found (see elided branch).
14396 ++m_AllocationCount;
// One round of generic defragmentation: walks source allocations from the
// last block / highest offsets backwards, tries to re-place each one earlier
// (lower block index, or lower offset in the same block — see MoveMakesSense),
// records a VmaDefragmentationMove per success and respects the byte/count
// budgets. When freeOldAllocations is set, the source suballocation is freed
// and the allocation is rebound immediately.
// NOTE(review): many lines (returns, braces, some arguments) are elided in
// this view; comments cover only the visible statements.
14400 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
14401 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14402 VkDeviceSize maxBytesToMove,
14403 uint32_t maxAllocationsToMove,
14404 bool freeOldAllocations)
14406 if(m_Blocks.empty())
14419 size_t srcBlockMinIndex = 0;
// Source cursor starts at the last block; SIZE_MAX means "reset to the
// last allocation of the current source block".
14432 size_t srcBlockIndex = m_Blocks.size() - 1;
14433 size_t srcAllocIndex = SIZE_MAX;
// Advance the source cursor to a block that still has allocations.
14439 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
14441 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
14444 if(srcBlockIndex == srcBlockMinIndex)
14451 srcAllocIndex = SIZE_MAX;
14456 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
14460 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
14461 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
14463 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
14464 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
14465 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
14466 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source.
14469 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
14471 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
14472 VmaAllocationRequest dstAllocRequest;
14473 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
14474 m_CurrentFrameIndex,
14475 m_pBlockVector->GetFrameInUseCount(),
14476 m_pBlockVector->GetBufferImageGranularity(),
14483 &dstAllocRequest) &&
// Only accept placements that actually improve fragmentation.
14485 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
14487 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop the round when either budget would be exceeded.
14490 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
14491 (m_BytesMoved + size > maxBytesToMove))
14496 VmaDefragmentationMove move = {};
14497 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
14498 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
14499 move.srcOffset = srcOffset;
14500 move.dstOffset = dstAllocRequest.offset;
14502 move.hAllocation = allocInfo.m_hAllocation;
14503 move.pSrcBlock = pSrcBlockInfo->m_pBlock;
14504 move.pDstBlock = pDstBlockInfo->m_pBlock;
14506 moves.push_back(move);
// Reserve the destination range in the destination block's metadata.
14508 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
14512 allocInfo.m_hAllocation);
14514 if(freeOldAllocations)
14516 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
14517 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
14520 if(allocInfo.m_pChanged != VMA_NULL)
14522 *allocInfo.m_pChanged = VK_TRUE;
14525 ++m_AllocationsMoved;
14526 m_BytesMoved += size;
14528 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Move the source cursor to the previous allocation / previous block.
14536 if(srcAllocIndex > 0)
14542 if(srcBlockIndex > 0)
14545 srcAllocIndex = SIZE_MAX;
// Counts how many tracked blocks contain at least one non-movable allocation
// (m_HasNonMovableAllocations flag).
14555 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const
14558 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14560 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Generic defragmentation driver: populates per-block allocation lists
// (every non-free suballocation when m_AllAllocations, otherwise those added
// via AddAllocation), sorts allocations within each block by descending
// offset, sorts the blocks by move-destination preference, then runs up to
// `roundCount` DefragmentRound passes while the result stays VK_SUCCESS.
14568 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
14569 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14570 VkDeviceSize maxBytesToMove,
14571 uint32_t maxAllocationsToMove,
// Nothing to do when no allocations were registered and not in "all" mode.
14574 if(!m_AllAllocations && m_AllocationCount == 0)
14579 const size_t blockCount = m_Blocks.size();
14580 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14582 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
14584 if(m_AllAllocations)
// In "all allocations" mode, harvest every used suballocation from the
// block's metadata directly.
14586 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
14587 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
14588 it != pMetadata->m_Suballocations.end();
14591 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
14593 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
14594 pBlockInfo->m_Allocations.push_back(allocInfo);
14599 pBlockInfo->CalcHasNonMovableAllocations();
// Sources are consumed from the back, so highest offsets go first.
14603 pBlockInfo->SortAllocationsByOffsetDescending();
14609 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
14612 const uint32_t roundCount = 2;
14615 VkResult result = VK_SUCCESS;
14616 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
// A move is worthwhile only if it lands strictly "earlier": a lower block
// index, or — within the same block — a lower offset. (Return statements are
// elided in this view; only the comparisons are visible.)
14624 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
14625 size_t dstBlockIndex, VkDeviceSize dstOffset,
14626 size_t srcBlockIndex, VkDeviceSize srcOffset)
14628 if(dstBlockIndex < srcBlockIndex)
14632 if(dstBlockIndex > srcBlockIndex)
14636 if(dstOffset < srcOffset)
// Constructor of the fast (compacting) algorithm. It requires no debug
// margin between suballocations — asserted below — because it relocates
// allocations to densely packed offsets.
14646 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
14648 VmaBlockVector* pBlockVector,
14649 uint32_t currentFrameIndex,
14650 bool overlappingMoveSupported) :
14651 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14652 m_OverlappingMoveSupported(overlappingMoveSupported),
14653 m_AllocationCount(0),
14654 m_AllAllocations(false),
14656 m_AllocationsMoved(0),
14657 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
14659 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Destructor (body elided in this view).
14663 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast compacting defragmentation: strips free-space bookkeeping
// (PreprocessMetadata), orders blocks by ascending free size, then sweeps all
// suballocations front-to-back, re-packing each one either into a previously
// registered free gap (FreeSpaceDatabase) or at the current destination
// cursor (dstBlockInfoIndex/dstOffset). Rebuilds metadata afterwards
// (PostprocessMetadata).
// NOTE(review): many lines (braces, some declarations such as `end`, early
// returns) are elided in this view; comments cover only visible statements.
14667 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
14668 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14669 VkDeviceSize maxBytesToMove,
14670 uint32_t maxAllocationsToMove,
14673 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
14675 const size_t blockCount = m_pBlockVector->GetBlockCount();
14676 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
14681 PreprocessMetadata();
// Block order for the sweep: ascending total free size.
14685 m_BlockInfos.resize(blockCount);
14686 for(
size_t i = 0; i < blockCount; ++i)
14688 m_BlockInfos[i].origBlockIndex = i;
14691 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
14692 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
14693 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Records free gaps skipped during the sweep so later (smaller)
// allocations can still fill them.
14698 FreeSpaceDatabase freeSpaceDb;
// Destination cursor: block (by sorted info index) + packed offset.
14700 size_t dstBlockInfoIndex = 0;
14701 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14702 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14703 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14704 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
14705 VkDeviceSize dstOffset = 0;
14708 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
14710 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
14711 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
14712 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
14713 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
14714 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
14716 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
14717 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
14718 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Budget check: stop the sweep once either limit would be exceeded.
14719 if(m_AllocationsMoved == maxAllocationsToMove ||
14720 m_BytesMoved + srcAllocSize > maxBytesToMove)
14725 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
14727 VmaDefragmentationMove move = {};
// Case 1: the allocation fits into a previously registered free gap.
14729 size_t freeSpaceInfoIndex;
14730 VkDeviceSize dstAllocOffset;
14731 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
14732 freeSpaceInfoIndex, dstAllocOffset))
14734 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
14735 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
14736 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Case 1a: gap is in the same block — move within the block.
14739 if(freeSpaceInfoIndex == srcBlockInfoIndex)
14741 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14745 VmaSuballocation suballoc = *srcSuballocIt;
14746 suballoc.offset = dstAllocOffset;
14747 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
14748 m_BytesMoved += srcAllocSize;
14749 ++m_AllocationsMoved;
14751 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14753 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14754 srcSuballocIt = nextSuballocIt;
14756 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14758 move.srcBlockIndex = srcOrigBlockIndex;
14759 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14760 move.srcOffset = srcAllocOffset;
14761 move.dstOffset = dstAllocOffset;
14762 move.size = srcAllocSize;
14764 moves.push_back(move);
// Case 1b: gap is in an earlier block — rebind across blocks.
14771 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
14773 VmaSuballocation suballoc = *srcSuballocIt;
14774 suballoc.offset = dstAllocOffset;
14775 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
14776 m_BytesMoved += srcAllocSize;
14777 ++m_AllocationsMoved;
14779 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14781 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14782 srcSuballocIt = nextSuballocIt;
14784 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14786 move.srcBlockIndex = srcOrigBlockIndex;
14787 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14788 move.srcOffset = srcAllocOffset;
14789 move.dstOffset = dstAllocOffset;
14790 move.size = srcAllocSize;
14792 moves.push_back(move);
// Case 2: pack at the destination cursor.
14797 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance to the next destination block while the allocation does not
// fit; the remaining tail is registered as reusable free space.
14800 while(dstBlockInfoIndex < srcBlockInfoIndex &&
14801 dstAllocOffset + srcAllocSize > dstBlockSize)
14804 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
14806 ++dstBlockInfoIndex;
14807 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14808 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14809 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14810 dstBlockSize = pDstMetadata->GetSize();
14812 dstAllocOffset = 0;
// Case 2a: destination is the same block as the source.
14816 if(dstBlockInfoIndex == srcBlockInfoIndex)
14818 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14820 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
14822 bool skipOver = overlap;
14823 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: skip the overlapping move when the shift is tiny
// relative to the allocation size (< 1/64), to avoid expensive
// overlapping copies with little gain.
14827 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
// Skipped: keep the allocation in place, register the gap before it.
14832 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
14834 dstOffset = srcAllocOffset + srcAllocSize;
// Moved in place within the same block.
14840 srcSuballocIt->offset = dstAllocOffset;
14841 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
14842 dstOffset = dstAllocOffset + srcAllocSize;
14843 m_BytesMoved += srcAllocSize;
14844 ++m_AllocationsMoved;
14847 move.srcBlockIndex = srcOrigBlockIndex;
14848 move.dstBlockIndex = dstOrigBlockIndex;
14849 move.srcOffset = srcAllocOffset;
14850 move.dstOffset = dstAllocOffset;
14851 move.size = srcAllocSize;
14853 moves.push_back(move);
// Case 2b: destination is an earlier block — rebind across blocks.
14861 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
14862 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
14864 VmaSuballocation suballoc = *srcSuballocIt;
14865 suballoc.offset = dstAllocOffset;
14866 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
14867 dstOffset = dstAllocOffset + srcAllocSize;
14868 m_BytesMoved += srcAllocSize;
14869 ++m_AllocationsMoved;
14871 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14873 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14874 srcSuballocIt = nextSuballocIt;
// Destination list stays offset-sorted because packing is monotonic.
14876 pDstMetadata->m_Suballocations.push_back(suballoc);
14878 move.srcBlockIndex = srcOrigBlockIndex;
14879 move.dstBlockIndex = dstOrigBlockIndex;
14880 move.srcOffset = srcAllocOffset;
14881 move.dstOffset = dstAllocOffset;
14882 move.size = srcAllocSize;
14884 moves.push_back(move);
14890 m_BlockInfos.clear();
// Rebuild free-list bookkeeping invalidated by the sweep.
14892 PostprocessMetadata();
// Prepares every block's metadata for the compacting sweep: resets the free
// counters, marks the whole block as free-size, clears the by-size free list,
// and erases all FREE suballocations from the suballocation list so only used
// entries remain. PostprocessMetadata() rebuilds what is dropped here.
14897 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
14899 const size_t blockCount = m_pBlockVector->GetBlockCount();
14900 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14902 VmaBlockMetadata_Generic*
const pMetadata =
14903 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14904 pMetadata->m_FreeCount = 0;
14905 pMetadata->m_SumFreeSize = pMetadata->GetSize();
14906 pMetadata->m_FreeSuballocationsBySize.clear();
14907 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14908 it != pMetadata->m_Suballocations.end(); )
14910 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
// Save the successor before erasing (advance elided in this view).
14912 VmaSuballocationList::iterator nextIt = it;
14914 pMetadata->m_Suballocations.erase(it);
// Rebuilds each block's free-space bookkeeping after the compacting sweep:
// re-inserts FREE suballocations for every gap between used entries and for
// trailing space, restores m_FreeCount / m_SumFreeSize, registers large-enough
// free ranges in m_FreeSuballocationsBySize and re-sorts that list.
14925 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
14927 const size_t blockCount = m_pBlockVector->GetBlockCount();
14928 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14930 VmaBlockMetadata_Generic*
const pMetadata =
14931 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14932 const VkDeviceSize blockSize = pMetadata->GetSize();
// Block drained completely: one FREE suballocation spanning the block.
14935 if(pMetadata->m_Suballocations.empty())
14937 pMetadata->m_FreeCount = 1;
14939 VmaSuballocation suballoc = {
14943 VMA_SUBALLOCATION_TYPE_FREE };
14944 pMetadata->m_Suballocations.push_back(suballoc);
14945 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Otherwise walk the used entries in offset order, inserting FREE gaps.
14950 VkDeviceSize offset = 0;
14951 VmaSuballocationList::iterator it;
14952 for(it = pMetadata->m_Suballocations.begin();
14953 it != pMetadata->m_Suballocations.end();
// After PreprocessMetadata() only used entries remain, in offset order.
14956 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
14957 VMA_ASSERT(it->offset >= offset);
14960 if(it->offset > offset)
14962 ++pMetadata->m_FreeCount;
14963 const VkDeviceSize freeSize = it->offset - offset;
14964 VmaSuballocation suballoc = {
14968 VMA_SUBALLOCATION_TYPE_FREE };
14969 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
14970 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14972 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
14976 pMetadata->m_SumFreeSize -= it->size;
14977 offset = it->offset + it->size;
// Trailing free space after the last used entry.
14981 if(offset < blockSize)
14983 ++pMetadata->m_FreeCount;
14984 const VkDeviceSize freeSize = blockSize - offset;
14985 VmaSuballocation suballoc = {
14989 VMA_SUBALLOCATION_TYPE_FREE };
14990 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
14991 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): this uses strict '>' while the gap case above uses '>=' —
// possibly unintentional asymmetry; verify against upstream before changing.
14992 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14994 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
// Restore the by-size ordering invariant of the free list.
14999 pMetadata->m_FreeSuballocationsBySize.begin(),
15000 pMetadata->m_FreeSuballocationsBySize.end(),
15001 VmaSuballocationItemSizeLess());
15004 VMA_HEAVY_ASSERT(pMetadata->Validate());
// Inserts `suballoc` into pMetadata's suballocation list keeping it sorted by
// offset: linearly scans for the first element not below the new offset and
// inserts before it (loop advance elided in this view).
15008 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
15011 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
15012 while(it != pMetadata->m_Suballocations.end())
15014 if(it->offset < suballoc.offset)
15019 pMetadata->m_Suballocations.insert(it, suballoc);
// Constructor: pure member initialization — no algorithm is created yet
// (m_pAlgorithm stays null until Begin() chooses Fast vs Generic).
15025 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
15028 VmaBlockVector* pBlockVector,
15029 uint32_t currFrameIndex) :
15031 mutexLocked(false),
15032 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
15033 defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
15034 defragmentationMovesProcessed(0),
15035 defragmentationMovesCommitted(0),
15036 hasDefragmentationPlan(0),
15037 m_hAllocator(hAllocator),
15038 m_hCustomPool(hCustomPool),
15039 m_pBlockVector(pBlockVector),
15040 m_CurrFrameIndex(currFrameIndex),
15041 m_pAlgorithm(VMA_NULL),
15042 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
15043 m_AllAllocations(false)
// Destructor: releases the algorithm instance created in Begin() (safe when
// still VMA_NULL).
15047 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
15049 vma_delete(m_hAllocator, m_pAlgorithm);
// Queues one allocation (and its optional "changed" output flag) for later
// registration with the algorithm in Begin().
15052 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
15054 AllocInfo info = { hAlloc, pChanged };
15055 m_Allocations.push_back(info);
// Chooses and constructs the defragmentation algorithm, then feeds it the
// allocations. The Fast algorithm is eligible only when there is no debug
// margin and no possible bufferImageGranularity conflict (further elided
// conditions apply); otherwise the Generic algorithm is used.
15058 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported,
VmaDefragmentationFlags flags)
// "All allocations" also holds when every allocation was explicitly added.
15060 const bool allAllocations = m_AllAllocations ||
15061 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
15074 if(VMA_DEBUG_MARGIN == 0 &&
15076 !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
15079 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
15080 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
15084 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
15085 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
15090 m_pAlgorithm->AddAll();
15094 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
15096 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Constructor: zeroes the fixed per-memory-type default-pool context array;
// contexts are created lazily in AddPools()/AddAllocations().
15104 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
15106 uint32_t currFrameIndex,
15109 m_hAllocator(hAllocator),
15110 m_CurrFrameIndex(currFrameIndex),
15113 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
15115 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Destructor: finishes defragmentation (DefragmentationEnd) and destroys all
// per-block-vector contexts — custom-pool ones first, then the per-memory-type
// default-pool ones, in reverse order.
15118 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
15120 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15122 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
15123 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
15124 vma_delete(m_hAllocator, pBlockVectorCtx);
15126 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
15128 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
// Default-pool slots may be null (created lazily).
15129 if(pBlockVectorCtx)
15131 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
15132 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation. Pools using a non-default
// block-vector algorithm are not eligible (only algorithm 0 is handled).
// For each eligible pool an existing context is reused if present, otherwise
// a new one is created; AddAll() marks every allocation of the pool.
15137 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
const VmaPool* pPools)
15139 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15141 VmaPool pool = pPools[poolIndex];
15144 if(pool->m_BlockVector.GetAlgorithm() == 0)
15146 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Linear search for an already-created context of this pool.
15148 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15150 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
15152 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
15157 if(!pBlockVectorDefragCtx)
15159 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15162 &pool->m_BlockVector,
15164 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
15167 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Only block-type,
// non-lost allocations are eligible. The allocation is routed to the context
// of its custom pool (found or created in m_CustomPoolContexts) or, when
// pool-less, to the per-memory-type default context. The optional
// pAllocationsChanged[i] flag is forwarded so the caller learns which
// allocations actually moved.
15172 void VmaDefragmentationContext_T::AddAllocations(
15173 uint32_t allocationCount,
15175 VkBool32* pAllocationsChanged)
15178 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
15181 VMA_ASSERT(hAlloc);
// Dedicated and lost allocations cannot be defragmented.
15183 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
15185 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
15187 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
15189 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation belongs to a custom pool.
15191 if(hAllocPool != VK_NULL_HANDLE)
// Non-default pool algorithms are not supported here.
15194 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
15196 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15198 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
15200 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
15204 if(!pBlockVectorDefragCtx)
15206 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15209 &hAllocPool->m_BlockVector,
15211 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Pool-less allocation: use the default context of its memory type.
15218 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
15219 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
15220 if(!pBlockVectorDefragCtx)
15222 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15225 m_hAllocator->m_pBlockVectors[memTypeIndex],
15227 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
15231 if(pBlockVectorDefragCtx)
15233 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
15234 &pAllocationsChanged[allocIndex] : VMA_NULL;
15235 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Top-level defragmentation entry point. Stores the CPU/GPU byte/count
// budgets, bails out with VK_NOT_READY when all budgets are zero, and zeroes
// the GPU budgets when no command buffer is provided. Two execution paths
// follow (the branch between them is elided in this view): the legacy path
// runs Defragment() on every default-pool and custom-pool context
// immediately; the incremental path lazily builds each context's
// defragmentation plan and then drains moves via ProcessDefragmentations()
// into the caller's pCurrentMove array, bounded by movesLeft.
15241 VkResult VmaDefragmentationContext_T::Defragment(
15242 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
15243 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// Remember budgets for the incremental (pass-based) path.
15255 m_MaxCpuBytesToMove = maxCpuBytesToMove;
15256 m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
15258 m_MaxGpuBytesToMove = maxGpuBytesToMove;
15259 m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;
15261 if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
15262 m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
15265 return VK_NOT_READY;
// Without a command buffer, GPU-side copies are impossible.
15268 if(commandBuffer == VK_NULL_HANDLE)
15270 maxGpuBytesToMove = 0;
15271 maxGpuAllocationsToMove = 0;
15274 VkResult res = VK_SUCCESS;
// Legacy path: default-pool contexts first...
15277 for(uint32_t memTypeIndex = 0;
15278 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
15281 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15282 if(pBlockVectorCtx)
15284 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15285 pBlockVectorCtx->GetBlockVector()->Defragment(
15288 maxCpuBytesToMove, maxCpuAllocationsToMove,
15289 maxGpuBytesToMove, maxGpuAllocationsToMove,
15291 if(pBlockVectorCtx->res != VK_SUCCESS)
15293 res = pBlockVectorCtx->res;
// ...then custom-pool contexts.
15299 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15300 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
15303 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15304 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15305 pBlockVectorCtx->GetBlockVector()->Defragment(
15308 maxCpuBytesToMove, maxCpuAllocationsToMove,
15309 maxGpuBytesToMove, maxGpuAllocationsToMove,
15311 if(pBlockVectorCtx->res != VK_SUCCESS)
15313 res = pBlockVectorCtx->res;
// Incremental path: ensure a plan exists per context, then emit moves.
15326 for(uint32_t memTypeIndex = 0;
15327 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15330 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15331 if(pBlockVectorCtx)
15333 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15335 if(!pBlockVectorCtx->hasDefragmentationPlan)
15337 pBlockVectorCtx->GetBlockVector()->Defragment(
15340 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15341 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
15344 if(pBlockVectorCtx->res < VK_SUCCESS)
15347 pBlockVectorCtx->hasDefragmentationPlan =
true;
15350 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15352 pCurrentMove, movesLeft);
15354 movesLeft -= processed;
15355 pCurrentMove += processed;
// Same incremental handling for custom-pool contexts.
15360 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15361 customCtxIndex < customCtxCount;
15364 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15365 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15367 if(!pBlockVectorCtx->hasDefragmentationPlan)
15369 pBlockVectorCtx->GetBlockVector()->Defragment(
15372 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15373 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
15376 if(pBlockVectorCtx->res < VK_SUCCESS)
15379 pBlockVectorCtx->hasDefragmentationPlan =
true;
15382 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15384 pCurrentMove, movesLeft);
15386 movesLeft -= processed;
15387 pCurrentMove += processed;
// Ends one incremental defragmentation pass: commits processed moves on every
// context (default-pool then custom-pool). Returns VK_NOT_READY when any
// context has no plan yet or still has uncommitted moves — i.e. more passes
// are needed; VK_SUCCESS otherwise.
15394 VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
15396 VkResult res = VK_SUCCESS;
15399 for(uint32_t memTypeIndex = 0;
15400 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15403 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15404 if(pBlockVectorCtx)
15406 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15408 if(!pBlockVectorCtx->hasDefragmentationPlan)
15410 res = VK_NOT_READY;
15414 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15415 pBlockVectorCtx, m_pStats);
// Unfinished work remains → caller must run another pass.
15417 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15418 res = VK_NOT_READY;
15423 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15424 customCtxIndex < customCtxCount;
15427 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15428 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15430 if(!pBlockVectorCtx->hasDefragmentationPlan)
15432 res = VK_NOT_READY;
15436 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15437 pBlockVectorCtx, m_pStats);
15439 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15440 res = VK_NOT_READY;
15449 #if VMA_RECORDING_ENABLED
// Recorder constructor: captures the recording start timestamp (used to
// compute relative call times). Other member initializers are elided in
// this view.
15451 VmaRecorder::VmaRecorder() :
15455 m_RecordingStartTime(std::chrono::high_resolution_clock::now())
15461 m_UseMutex = useMutex;
15462 m_Flags = settings.
flags;
15464 #if defined(_WIN32)
15466 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
15470 return VK_ERROR_INITIALIZATION_FAILED;
15474 m_File = fopen(settings.
pFilePath,
"wb");
15478 return VK_ERROR_INITIALIZATION_FAILED;
15483 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
15484 fprintf(m_File,
"%s\n",
"1,8");
// Destructor: acts only when a recording file was opened (cleanup body
// elided in this view — presumably closes m_File; verify upstream).
15489 VmaRecorder::~VmaRecorder()
15491 if(m_File != VMA_NULL)
// Appends one "vmaCreateAllocator" CSV line (thread id, timestamp, frame)
// to the recording file under the file mutex.
15497 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
15499 CallParams callParams;
15500 GetBasicParams(callParams);
15502 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15503 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Appends one "vmaDestroyAllocator" CSV line to the recording file under the
// file mutex.
15507 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
15509 CallParams callParams;
15510 GetBasicParams(callParams);
15512 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15513 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
15519 CallParams callParams;
15520 GetBasicParams(callParams);
15522 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15523 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one "vmaDestroyPool" CSV line (pool handle printed as %p; argument
// elided in this view) to the recording file under the file mutex.
15534 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
15536 CallParams callParams;
15537 GetBasicParams(callParams);
15539 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15540 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one "vmaAllocateMemory" CSV line: memory requirements, create-info
// fields (several arguments elided in this view) and the stringified user
// data, under the file mutex.
15545 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
15546 const VkMemoryRequirements& vkMemReq,
15550 CallParams callParams;
15551 GetBasicParams(callParams);
15553 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// Converts pUserData into a printable string respecting the create flags.
15554 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15555 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15557 vkMemReq.alignment,
15558 vkMemReq.memoryTypeBits,
15566 userDataStr.GetString());
// Appends one "vmaAllocateMemoryPages" CSV line, followed by the list of
// allocation handles (PrintPointerList) and the user-data string, under the
// file mutex.
15570 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
15571 const VkMemoryRequirements& vkMemReq,
15573 uint64_t allocationCount,
15576 CallParams callParams;
15577 GetBasicParams(callParams);
15579 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15580 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15581 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
15583 vkMemReq.alignment,
15584 vkMemReq.memoryTypeBits,
// The variable-length allocation list goes between the fixed columns.
15591 PrintPointerList(allocationCount, pAllocations);
15592 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Appends one "vmaAllocateMemoryForBuffer" CSV line including the dedicated-
// allocation hints (encoded as 0/1) and the user-data string, under the file
// mutex. Some format arguments are elided in this view.
15596 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
15597 const VkMemoryRequirements& vkMemReq,
15598 bool requiresDedicatedAllocation,
15599 bool prefersDedicatedAllocation,
15603 CallParams callParams;
15604 GetBasicParams(callParams);
15606 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15607 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15608 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15610 vkMemReq.alignment,
15611 vkMemReq.memoryTypeBits,
15612 requiresDedicatedAllocation ? 1 : 0,
15613 prefersDedicatedAllocation ? 1 : 0,
15621 userDataStr.GetString());
// Appends one "vmaAllocateMemoryForImage" CSV record; mirrors
// RecordAllocateMemoryForBuffer but for image allocations.
// NOTE(review): extraction dropped lines here (parameters, braces and several
// fprintf arguments); verify against upstream vk_mem_alloc.h before editing.
15625 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
15626 const VkMemoryRequirements& vkMemReq,
15627 bool requiresDedicatedAllocation,
15628 bool prefersDedicatedAllocation,
15632 CallParams callParams;
15633 GetBasicParams(callParams);
15635 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15636 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15637 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15639 vkMemReq.alignment,
15640 vkMemReq.memoryTypeBits,
15641 requiresDedicatedAllocation ? 1 : 0,
15642 prefersDedicatedAllocation ? 1 : 0,
15650 userDataStr.GetString());
// Appends one "vmaFreeMemory" CSV record (allocation handle only).
// NOTE(review): extraction dropped lines here (allocation parameter, braces,
// trailing fprintf argument); verify against upstream vk_mem_alloc.h.
15654 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
15657 CallParams callParams;
15658 GetBasicParams(callParams);
15660 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15661 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one "vmaFreeMemoryPages" CSV record; the freed allocation handles
// are written as a space-separated list via PrintPointerList.
// NOTE(review): extraction dropped lines here (pAllocations parameter,
// braces); verify against upstream vk_mem_alloc.h before editing.
15666 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
15667 uint64_t allocationCount,
15670 CallParams callParams;
15671 GetBasicParams(callParams);
15673 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15674 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
15675 PrintPointerList(allocationCount, pAllocations);
15676 fprintf(m_File,
"\n");
// Appends one "vmaSetAllocationUserData" CSV record with the allocation
// handle and the new user data rendered by UserDataString.
// NOTE(review): extraction dropped lines here (allocation parameter, the
// UserDataString constructor arguments, braces); verify against upstream
// vk_mem_alloc.h before editing.
15680 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
15682 const void* pUserData)
15684 CallParams callParams;
15685 GetBasicParams(callParams);
15687 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15688 UserDataString userDataStr(
15691 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15693 userDataStr.GetString());
// Appends one "vmaCreateLostAllocation" CSV record (allocation handle only).
// NOTE(review): extraction dropped lines here (allocation parameter, braces,
// trailing fprintf argument); verify against upstream vk_mem_alloc.h.
15697 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
15700 CallParams callParams;
15701 GetBasicParams(callParams);
15703 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15704 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one "vmaMapMemory" CSV record (allocation handle only).
// NOTE(review): extraction dropped lines here (allocation parameter, braces,
// trailing fprintf argument); verify against upstream vk_mem_alloc.h.
15709 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
15712 CallParams callParams;
15713 GetBasicParams(callParams);
15715 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15716 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one "vmaUnmapMemory" CSV record (allocation handle only).
// NOTE(review): extraction dropped lines here (allocation parameter, braces,
// trailing fprintf argument); verify against upstream vk_mem_alloc.h.
15721 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
15724 CallParams callParams;
15725 GetBasicParams(callParams);
15727 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15728 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one "vmaFlushAllocation" CSV record: allocation handle plus the
// flushed offset and size.
// NOTE(review): extraction dropped lines here (braces, trailing fprintf
// arguments); verify against upstream vk_mem_alloc.h before editing.
15733 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
15734 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15736 CallParams callParams;
15737 GetBasicParams(callParams);
15739 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15740 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends one "vmaInvalidateAllocation" CSV record: allocation handle plus
// the invalidated offset and size.
// NOTE(review): extraction dropped lines here (braces, trailing fprintf
// arguments); verify against upstream vk_mem_alloc.h before editing.
15747 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
15748 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15750 CallParams callParams;
15751 GetBasicParams(callParams);
15753 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15754 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends one "vmaCreateBuffer" CSV record: the relevant VkBufferCreateInfo
// fields, the VmaAllocationCreateInfo fields, and the resulting handles.
// NOTE(review): extraction dropped lines here (parameters, braces and several
// fprintf arguments matching the format string); verify against upstream
// vk_mem_alloc.h before editing.
15761 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
15762 const VkBufferCreateInfo& bufCreateInfo,
15766 CallParams callParams;
15767 GetBasicParams(callParams);
15769 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15770 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15771 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15772 bufCreateInfo.flags,
15773 bufCreateInfo.size,
15774 bufCreateInfo.usage,
15775 bufCreateInfo.sharingMode,
15776 allocCreateInfo.
flags,
15777 allocCreateInfo.
usage,
15781 allocCreateInfo.
pool,
15783 userDataStr.GetString());
// Appends one "vmaCreateImage" CSV record: the full VkImageCreateInfo
// (flags, type, format, extent, mips, layers, samples, tiling, usage,
// sharing mode, initial layout), the VmaAllocationCreateInfo fields and the
// resulting handles.
// NOTE(review): extraction dropped lines here (parameters, braces and several
// fprintf arguments); verify against upstream vk_mem_alloc.h before editing.
15787 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
15788 const VkImageCreateInfo& imageCreateInfo,
15792 CallParams callParams;
15793 GetBasicParams(callParams);
15795 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15796 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15797 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15798 imageCreateInfo.flags,
15799 imageCreateInfo.imageType,
15800 imageCreateInfo.format,
15801 imageCreateInfo.extent.width,
15802 imageCreateInfo.extent.height,
15803 imageCreateInfo.extent.depth,
15804 imageCreateInfo.mipLevels,
15805 imageCreateInfo.arrayLayers,
15806 imageCreateInfo.samples,
15807 imageCreateInfo.tiling,
15808 imageCreateInfo.usage,
15809 imageCreateInfo.sharingMode,
15810 imageCreateInfo.initialLayout,
15811 allocCreateInfo.
flags,
15812 allocCreateInfo.
usage,
15816 allocCreateInfo.
pool,
15818 userDataStr.GetString());
// Appends one "vmaDestroyBuffer" CSV record (allocation handle only).
// NOTE(review): extraction dropped lines here (allocation parameter, braces,
// trailing fprintf argument); verify against upstream vk_mem_alloc.h.
15822 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
15825 CallParams callParams;
15826 GetBasicParams(callParams);
15828 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15829 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one "vmaDestroyImage" CSV record (allocation handle only).
// NOTE(review): extraction dropped lines here (allocation parameter, braces,
// trailing fprintf argument); verify against upstream vk_mem_alloc.h.
15834 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
15837 CallParams callParams;
15838 GetBasicParams(callParams);
15840 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15841 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one "vmaTouchAllocation" CSV record (allocation handle only).
// NOTE(review): extraction dropped lines here (allocation parameter, braces,
// trailing fprintf argument); verify against upstream vk_mem_alloc.h.
15846 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
15849 CallParams callParams;
15850 GetBasicParams(callParams);
15852 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15853 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one "vmaGetAllocationInfo" CSV record (allocation handle only).
// NOTE(review): extraction dropped lines here (allocation parameter, braces,
// trailing fprintf argument); verify against upstream vk_mem_alloc.h.
15858 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
15861 CallParams callParams;
15862 GetBasicParams(callParams);
15864 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15865 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one "vmaMakePoolAllocationsLost" CSV record (pool handle only).
// NOTE(review): extraction dropped lines here (pool parameter, braces,
// trailing fprintf argument); verify against upstream vk_mem_alloc.h.
15870 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
15873 CallParams callParams;
15874 GetBasicParams(callParams);
15876 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15877 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one "vmaDefragmentationBegin" CSV record: the VmaDefragmentationInfo2
// fields with the allocation and pool handle lists written between the three
// fprintf calls (via PrintPointerList in the dropped lines).
// NOTE(review): extraction dropped lines here (parameters, braces, the list
// printing calls and the final fprintf arguments); verify against upstream
// vk_mem_alloc.h before editing.
15882 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
15886 CallParams callParams;
15887 GetBasicParams(callParams);
15889 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15890 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
15893 fprintf(m_File,
",");
15895 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Appends one "vmaDefragmentationEnd" CSV record (context handle only).
// NOTE(review): extraction dropped lines here (context parameter, braces,
// trailing fprintf argument); verify against upstream vk_mem_alloc.h.
15905 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
15908 CallParams callParams;
15909 GetBasicParams(callParams);
15911 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15912 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends one "vmaSetPoolName" CSV record: pool handle and the new name
// (empty string when name is null). Note the name is written verbatim, so a
// name containing ',' or '\n' would corrupt the CSV line.
// NOTE(review): extraction dropped lines here (pool/name parameters, braces);
// verify against upstream vk_mem_alloc.h before editing.
15917 void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
15921 CallParams callParams;
15922 GetBasicParams(callParams);
15924 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15925 fprintf(m_File,
"%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15926 pool, name != VMA_NULL ? name :
"");
15932 if(pUserData != VMA_NULL)
15936 m_Str = (
const char*)pUserData;
15941 snprintf(m_PtrStr, 17,
"%p", pUserData);
15951 void VmaRecorder::WriteConfiguration(
15952 const VkPhysicalDeviceProperties& devProps,
15953 const VkPhysicalDeviceMemoryProperties& memProps,
15954 uint32_t vulkanApiVersion,
15955 bool dedicatedAllocationExtensionEnabled,
15956 bool bindMemory2ExtensionEnabled,
15957 bool memoryBudgetExtensionEnabled,
15958 bool deviceCoherentMemoryExtensionEnabled)
15960 fprintf(m_File,
"Config,Begin\n");
15962 fprintf(m_File,
"VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));
15964 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
15965 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
15966 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
15967 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
15968 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
15969 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
15971 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
15972 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
15973 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
15975 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
15976 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
15978 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
15979 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
15981 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
15982 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
15984 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
15985 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
15988 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
15989 fprintf(m_File,
"Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
15990 fprintf(m_File,
"Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
15991 fprintf(m_File,
"Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);
15993 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
15994 fprintf(m_File,
"Macro,VMA_MIN_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_MIN_ALIGNMENT);
15995 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
15996 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
15997 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
15998 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
15999 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
16000 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
16001 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
16003 fprintf(m_File,
"Config,End\n");
16006 void VmaRecorder::GetBasicParams(CallParams& outParams)
16008 #if defined(_WIN32)
16009 outParams.threadId = GetCurrentThreadId();
16014 std::thread::id thread_id = std::this_thread::get_id();
16015 std::stringstream thread_id_to_string_converter;
16016 thread_id_to_string_converter << thread_id;
16017 std::string thread_id_as_string = thread_id_to_string_converter.str();
16018 outParams.threadId =
static_cast<uint32_t
>(std::stoi(thread_id_as_string.c_str()));
16021 auto current_time = std::chrono::high_resolution_clock::now();
16023 outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
16026 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
16030 fprintf(m_File,
"%p", pItems[0]);
16031 for(uint64_t i = 1; i < count; ++i)
16033 fprintf(m_File,
" %p", pItems[i]);
16038 void VmaRecorder::Flush()
16051 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
16052 m_Allocator(pAllocationCallbacks, 1024)
16056 template<
typename... Types>
VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
16058 VmaMutexLock mutexLock(m_Mutex);
16059 return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
16062 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
16064 VmaMutexLock mutexLock(m_Mutex);
16065 m_Allocator.Free(hAlloc);
// VmaAllocator_T constructor (fragment): the signature and several member
// initializers/lines are absent from this extraction -- this span begins in
// the middle of the constructor's initializer list.
// Visible work: initialize members from pCreateInfo, validate that requested
// extensions/Vulkan versions are enabled at compile time, zero the member
// structs, query device/memory properties, apply pHeapSizeLimit, create one
// VmaBlockVector per memory type, and optionally start the VmaRecorder and
// the memory-budget query.
// NOTE(review): verify every dropped line against upstream vk_mem_alloc.h
// before editing.
16073 m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
16080 m_hDevice(pCreateInfo->device),
16081 m_hInstance(pCreateInfo->instance),
16082 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
16083 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
16084 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
16085 m_AllocationObjectAllocator(&m_AllocationCallbacks),
16086 m_HeapSizeLimitMask(0),
16087 m_DeviceMemoryCount(0),
16088 m_PreferredLargeHeapBlockSize(0),
16089 m_PhysicalDevice(pCreateInfo->physicalDevice),
16090 m_CurrentFrameIndex(0),
16091 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
16093 m_GlobalMemoryTypeBits(UINT32_MAX)
16095 ,m_pRecorder(VMA_NULL)
// On Vulkan >= 1.1 the dedicated-allocation and bind-memory2 functionality is
// core, so the KHR-extension flags are cleared.
16098 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16100 m_UseKhrDedicatedAllocation =
false;
16101 m_UseKhrBindMemory2 =
false;
16104 if(VMA_DEBUG_DETECT_CORRUPTION)
// Corruption detection writes uint32_t markers into the debug margin, so the
// margin must be a multiple of 4 bytes.
16107 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Compile-time/runtime consistency checks: assert if the caller requests a
// feature that was disabled by preprocessor macros.
16112 if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
16114 #if !(VMA_DEDICATED_ALLOCATION)
16117 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
16120 #if !(VMA_BIND_MEMORY2)
16123 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
16127 #if !(VMA_MEMORY_BUDGET)
16130 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
16133 #if !(VMA_BUFFER_DEVICE_ADDRESS)
16134 if(m_UseKhrBufferDeviceAddress)
16136 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
16139 #if VMA_VULKAN_VERSION < 1002000
16140 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
16142 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
16145 #if VMA_VULKAN_VERSION < 1001000
16146 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16148 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
16151 #if !(VMA_MEMORY_PRIORITY)
16152 if(m_UseExtMemoryPriority)
16154 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
// Zero out member aggregates before filling them.
16158 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
16159 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
16160 memset(&m_MemProps, 0,
sizeof(m_MemProps));
16162 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
16163 memset(&m_VulkanFunctions, 0,
sizeof(m_VulkanFunctions));
16165 #if VMA_EXTERNAL_MEMORY
16166 memset(&m_TypeExternalMemoryHandleTypes, 0,
sizeof(m_TypeExternalMemoryHandleTypes));
// Query device and memory properties through the imported function pointers.
16178 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
16179 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
16181 VMA_ASSERT(VmaIsPow2(VMA_MIN_ALIGNMENT));
16182 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
16183 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
16184 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
16189 m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
16191 #if VMA_EXTERNAL_MEMORY
16195 sizeof(VkExternalMemoryHandleTypeFlagsKHR) * GetMemoryTypeCount());
// Clamp reported heap sizes to the user-provided pHeapSizeLimit values.
16201 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
16203 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
16204 if(limit != VK_WHOLE_SIZE)
16206 m_HeapSizeLimitMask |= 1u << heapIndex;
16207 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
16209 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default VmaBlockVector per memory type.
16215 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16217 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
16219 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
16223 preferredBlockSize,
16226 GetBufferImageGranularity(),
16231 GetMemoryTypeMinAlignment(memTypeIndex),
16240 VkResult res = VK_SUCCESS;
// Optional CSV recorder: only available when VMA_RECORDING_ENABLED == 1;
// otherwise requesting recording asserts and fails the allocator creation.
16245 #if VMA_RECORDING_ENABLED
16246 m_pRecorder = vma_new(
this, VmaRecorder)();
16248 if(res != VK_SUCCESS)
16252 m_pRecorder->WriteConfiguration(
16253 m_PhysicalDeviceProperties,
16255 m_VulkanApiVersion,
16256 m_UseKhrDedicatedAllocation,
16257 m_UseKhrBindMemory2,
16258 m_UseExtMemoryBudget,
16259 m_UseAmdDeviceCoherentMemory);
16260 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
16262 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
16263 return VK_ERROR_FEATURE_NOT_PRESENT;
16267 #if VMA_MEMORY_BUDGET
16268 if(m_UseExtMemoryBudget)
16270 UpdateVulkanBudget();
// Destructor: stops the recorder (when recording is compiled in), asserts
// that no custom pools or dedicated allocations are still alive, and deletes
// the per-memory-type block vectors in reverse order.
// NOTE(review): extraction dropped lines here (braces, #endif); verify
// against upstream vk_mem_alloc.h before editing.
16277 VmaAllocator_T::~VmaAllocator_T()
16279 #if VMA_RECORDING_ENABLED
16280 if(m_pRecorder != VMA_NULL)
16282 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
16283 vma_delete(
this, m_pRecorder);
16287 VMA_ASSERT(m_Pools.IsEmpty());
16289 for(
size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
16291 if(!m_DedicatedAllocations[memTypeIndex].IsEmpty())
16293 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
16296 vma_delete(
this, m_pBlockVectors[memTypeIndex]);
16300 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
16302 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
16303 ImportVulkanFunctions_Static();
16306 if(pVulkanFunctions != VMA_NULL)
16308 ImportVulkanFunctions_Custom(pVulkanFunctions);
16311 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
16312 ImportVulkanFunctions_Dynamic();
16315 ValidateVulkanFunctions();
#if VMA_STATIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Static()
{
    // Take entry points directly from the statically linked Vulkan loader.
    // VMA_SET_STATIC_FUNC(vkFoo) expands to:
    //   m_VulkanFunctions.vkFoo = (PFN_vkFoo)vkFoo;
#define VMA_SET_STATIC_FUNC(name) m_VulkanFunctions.name = (PFN_##name)name

    // Vulkan 1.0 core.
    VMA_SET_STATIC_FUNC(vkGetPhysicalDeviceProperties);
    VMA_SET_STATIC_FUNC(vkGetPhysicalDeviceMemoryProperties);
    VMA_SET_STATIC_FUNC(vkAllocateMemory);
    VMA_SET_STATIC_FUNC(vkFreeMemory);
    VMA_SET_STATIC_FUNC(vkMapMemory);
    VMA_SET_STATIC_FUNC(vkUnmapMemory);
    VMA_SET_STATIC_FUNC(vkFlushMappedMemoryRanges);
    VMA_SET_STATIC_FUNC(vkInvalidateMappedMemoryRanges);
    VMA_SET_STATIC_FUNC(vkBindBufferMemory);
    VMA_SET_STATIC_FUNC(vkBindImageMemory);
    VMA_SET_STATIC_FUNC(vkGetBufferMemoryRequirements);
    VMA_SET_STATIC_FUNC(vkGetImageMemoryRequirements);
    VMA_SET_STATIC_FUNC(vkCreateBuffer);
    VMA_SET_STATIC_FUNC(vkDestroyBuffer);
    VMA_SET_STATIC_FUNC(vkCreateImage);
    VMA_SET_STATIC_FUNC(vkDestroyImage);
    VMA_SET_STATIC_FUNC(vkCmdCopyBuffer);

#if VMA_VULKAN_VERSION >= 1001000
    // Vulkan 1.1 core: the promoted (no "KHR" suffix) functions are stored in
    // the *KHR members. VMA_SET_STATIC_FUNC_KHR(vkFoo2) expands to:
    //   m_VulkanFunctions.vkFoo2KHR = (PFN_vkFoo2)vkFoo2;
#define VMA_SET_STATIC_FUNC_KHR(name) m_VulkanFunctions.name##KHR = (PFN_##name)name
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_SET_STATIC_FUNC_KHR(vkGetBufferMemoryRequirements2);
        VMA_SET_STATIC_FUNC_KHR(vkGetImageMemoryRequirements2);
        VMA_SET_STATIC_FUNC_KHR(vkBindBufferMemory2);
        VMA_SET_STATIC_FUNC_KHR(vkBindImageMemory2);
        VMA_SET_STATIC_FUNC_KHR(vkGetPhysicalDeviceMemoryProperties2);
    }
#undef VMA_SET_STATIC_FUNC_KHR
#endif // VMA_VULKAN_VERSION >= 1001000

#undef VMA_SET_STATIC_FUNC
}

#endif // VMA_STATIC_VULKAN_FUNCTIONS == 1
16356 void VmaAllocator_T::ImportVulkanFunctions_Custom(
const VmaVulkanFunctions* pVulkanFunctions)
16358 VMA_ASSERT(pVulkanFunctions != VMA_NULL);
16360 #define VMA_COPY_IF_NOT_NULL(funcName) \
16361 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
16363 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
16364 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
16365 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
16366 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
16367 VMA_COPY_IF_NOT_NULL(vkMapMemory);
16368 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
16369 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
16370 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
16371 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
16372 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
16373 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
16374 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
16375 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
16376 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
16377 VMA_COPY_IF_NOT_NULL(vkCreateImage);
16378 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
16379 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
16381 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16382 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
16383 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
16386 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16387 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
16388 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
16391 #if VMA_MEMORY_BUDGET
16392 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
16395 #undef VMA_COPY_IF_NOT_NULL
#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
{
    // Fetch any entry points still missing after the static/custom imports.
    // Instance-level functions come from vkGetInstanceProcAddr, device-level
    // ones from vkGetDeviceProcAddr; existing (non-null) pointers are kept.
#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);

    // Vulkan 1.0 core.
    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
    VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
    VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
    VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
    VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
    VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
    VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
    VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
    VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
    VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
    VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
    VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");

#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        // Core 1.1 names (no "KHR" suffix) stored in the *KHR members.
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2");
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif

#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
    }
#endif

#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
    }
#endif

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif

#undef VMA_FETCH_DEVICE_FUNC
#undef VMA_FETCH_INSTANCE_FUNC
}

#endif // VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
16469 void VmaAllocator_T::ValidateVulkanFunctions()
16471 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
16472 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
16473 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
16474 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
16475 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
16476 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
16477 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
16478 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
16479 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
16480 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
16481 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
16482 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
16483 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
16484 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
16485 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
16486 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
16487 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
16489 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16490 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
16492 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
16493 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
16497 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16498 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
16500 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
16501 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
16505 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
16506 if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16508 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
16513 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
16515 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16516 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
16517 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
16518 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
// Allocates `allocationCount` allocations of a given memory type. Strategy:
// prefer dedicated VkDeviceMemory when forced by debug macro, requested by
// the caller, or when the size exceeds half the preferred block size;
// otherwise sub-allocate from the type's VmaBlockVector, falling back to
// dedicated memory on failure (unless the device is close to its
// maxMemoryAllocationCount limit).
// NOTE(review): extraction dropped many lines here (parameters, braces,
// argument lists of the AllocateDedicatedMemory / blockVector->Allocate
// calls); verify against upstream vk_mem_alloc.h before editing.
16521 VkResult VmaAllocator_T::AllocateMemoryOfType(
16523 VkDeviceSize alignment,
16524 bool dedicatedAllocation,
16525 VkBuffer dedicatedBuffer,
16526 VkBufferUsageFlags dedicatedBufferUsage,
16527 VkImage dedicatedImage,
16529 uint32_t memTypeIndex,
16530 VmaSuballocationType suballocType,
16531 size_t allocationCount,
16534 VMA_ASSERT(pAllocations != VMA_NULL);
16535 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
16541 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16551 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
16552 VMA_ASSERT(blockVector);
16554 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Dedicated memory is preferred for large requests (> half a block).
16555 bool preferDedicatedMemory =
16556 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
16557 dedicatedAllocation ||
16559 size > preferredBlockSize / 2;
16561 if(preferDedicatedMemory &&
16563 finalCreateInfo.
pool == VK_NULL_HANDLE)
16572 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16576 return AllocateDedicatedMemory(
16586 dedicatedBufferUsage,
// Primary path: sub-allocate from the block vector.
16594 VkResult res = blockVector->Allocate(
16595 m_CurrentFrameIndex.load(),
16602 if(res == VK_SUCCESS)
16610 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback to dedicated memory, but not when nearing the device's
// maxMemoryAllocationCount limit (3/4 threshold).
16616 if(m_DeviceMemoryCount.load() > m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount * 3 / 4)
16618 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16621 res = AllocateDedicatedMemory(
16631 dedicatedBufferUsage,
16635 if(res == VK_SUCCESS)
16638 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
16644 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates allocationCount dedicated VkDeviceMemory objects of the given
// memory type, building the VkMemoryAllocateInfo pNext chain from the
// enabled extensions (dedicated allocation, buffer device address, memory
// priority, external memory). On partial failure, frees already-created
// pages in reverse order. NOTE(review): interior lines are missing from this
// extract (closing braces, some statements).
16650 VkResult VmaAllocator_T::AllocateDedicatedMemory(
16652 VmaSuballocationType suballocType,
16653 uint32_t memTypeIndex,
16656 bool isUserDataString,
16659 VkBuffer dedicatedBuffer,
16660 VkBufferUsageFlags dedicatedBufferUsage,
16661 VkImage dedicatedImage,
16662 size_t allocationCount,
16665 VMA_ASSERT(allocationCount > 0 && pAllocations);
// Budget check for the whole batch before touching Vulkan.
16669 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16671 GetBudget(&heapBudget, heapIndex, 1);
16672 if(heapBudget.
usage + size * allocationCount > heapBudget.
budget)
16674 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16678 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
16679 allocInfo.memoryTypeIndex = memTypeIndex;
16680 allocInfo.allocationSize = size;
// VK_KHR_dedicated_allocation: tie the memory to the buffer or image.
16682 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16683 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
16684 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16686 if(dedicatedBuffer != VK_NULL_HANDLE)
16688 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
16689 dedicatedAllocInfo.buffer = dedicatedBuffer;
16690 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
16692 else if(dedicatedImage != VK_NULL_HANDLE)
16694 dedicatedAllocInfo.image = dedicatedImage;
16695 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
// VK_KHR_buffer_device_address: flag memory that may back such buffers.
16700 #if VMA_BUFFER_DEVICE_ADDRESS
16701 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
16702 if(m_UseKhrBufferDeviceAddress)
16704 bool canContainBufferWithDeviceAddress =
true;
16705 if(dedicatedBuffer != VK_NULL_HANDLE)
// UINT32_MAX means the buffer usage is unknown -> assume it may need it.
16707 canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX ||
16708 (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
16710 else if(dedicatedImage != VK_NULL_HANDLE)
16712 canContainBufferWithDeviceAddress =
false;
16714 if(canContainBufferWithDeviceAddress)
16716 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
16717 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
// VK_EXT_memory_priority.
16722 #if VMA_MEMORY_PRIORITY
16723 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
16724 if(m_UseExtMemoryPriority)
16726 priorityInfo.priority = priority;
16727 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
// External memory export, driven by per-memory-type handle types.
16731 #if VMA_EXTERNAL_MEMORY
16733 VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR };
16734 exportMemoryAllocInfo.handleTypes = GetExternalMemoryHandleTypeFlags(memTypeIndex);
16735 if(exportMemoryAllocInfo.handleTypes != 0)
16737 VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo);
// Allocate each page; stop at the first failure.
16742 VkResult res = VK_SUCCESS;
16743 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16745 res = AllocateDedicatedMemoryPage(
16753 pAllocations + allocIndex);
16754 if(res != VK_SUCCESS)
16760 if(res == VK_SUCCESS)
// Register the new allocations under the per-type mutex.
16764 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
16765 DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
16766 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16768 dedicatedAllocations.PushBack(pAllocations[allocIndex]);
16772 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure path: roll back already-created pages in reverse order.
16777 while(allocIndex--)
16780 VkDeviceMemory hMemory = currAlloc->GetMemory();
16792 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
16793 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
16794 currAlloc->SetUserData(
this, VMA_NULL);
16795 m_AllocationObjectAllocator.Free(currAlloc);
16798 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Creates one dedicated VkDeviceMemory page: allocates, optionally maps it
// persistently, then wraps it in a VmaAllocation_T and updates the budget.
// NOTE(review): interior lines are missing from this extract.
16804 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
16806 VmaSuballocationType suballocType,
16807 uint32_t memTypeIndex,
16808 const VkMemoryAllocateInfo& allocInfo,
16810 bool isUserDataString,
16814 VkDeviceMemory hMemory = VK_NULL_HANDLE;
16815 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
16818 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistent mapping requested (condition line elided in this extract).
16822 void* pMappedData = VMA_NULL;
16825 res = (*m_VulkanFunctions.vkMapMemory)(
16834 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Mapping failed: release the freshly allocated memory.
16835 FreeVulkanMemory(memTypeIndex, size, hMemory);
16840 *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
16841 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
16842 (*pAllocation)->SetUserData(
this, pUserData);
16843 m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
16844 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
16846 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. Uses
// vkGetBufferMemoryRequirements2KHR (with VkMemoryDedicatedRequirementsKHR)
// when dedicated allocation is available, otherwise the Vulkan 1.0 query
// with both dedicated flags reported as false.
16852 void VmaAllocator_T::GetBufferMemoryRequirements(
16854 VkMemoryRequirements& memReq,
16855 bool& requiresDedicatedAllocation,
16856 bool& prefersDedicatedAllocation)
const
16858 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16859 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16861 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
16862 memReqInfo.buffer = hBuffer;
16864 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16866 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16867 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16869 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16871 memReq = memReq2.memoryRequirements;
16872 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16873 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Vulkan 1.0 fallback: no dedicated-allocation information available.
16878 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
16879 requiresDedicatedAllocation =
false;
16880 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: queries requirements via
// vkGetImageMemoryRequirements2KHR + VkMemoryDedicatedRequirementsKHR when
// available, else the Vulkan 1.0 query with dedicated flags set to false.
16884 void VmaAllocator_T::GetImageMemoryRequirements(
16886 VkMemoryRequirements& memReq,
16887 bool& requiresDedicatedAllocation,
16888 bool& prefersDedicatedAllocation)
const
16890 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16891 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16893 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
16894 memReqInfo.image = hImage;
16896 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16898 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16899 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16901 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16903 memReq = memReq2.memoryRequirements;
16904 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16905 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Vulkan 1.0 fallback.
16910 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
16911 requiresDedicatedAllocation =
false;
16912 prefersDedicatedAllocation =
false;
// Top-level allocation entry point: validates the create-info flag
// combinations, dispatches to a custom pool's block vector when a pool is
// given, otherwise iterates candidate memory types (clearing each failed
// type's bit) calling AllocateMemoryOfType. NOTE(review): many interior
// lines are missing from this extract.
16916 VkResult VmaAllocator_T::AllocateMemory(
16917 const VkMemoryRequirements& vkMemReq,
16918 bool requiresDedicatedAllocation,
16919 bool prefersDedicatedAllocation,
16920 VkBuffer dedicatedBuffer,
16921 VkBufferUsageFlags dedicatedBufferUsage,
16922 VkImage dedicatedImage,
16924 VmaSuballocationType suballocType,
16925 size_t allocationCount,
// Pre-clear outputs so callers see null allocations on any failure.
16928 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
16930 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
16932 if(vkMemReq.size == 0)
16934 return VK_ERROR_VALIDATION_FAILED_EXT;
// Reject contradictory flag combinations.
16939 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
16940 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16945 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
16946 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16948 if(requiresDedicatedAllocation)
16952 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
16953 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16955 if(createInfo.
pool != VK_NULL_HANDLE)
16957 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
16958 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16961 if((createInfo.
pool != VK_NULL_HANDLE) &&
16964 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
16965 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool path: allocate directly from the pool's block vector.
16968 if(createInfo.
pool != VK_NULL_HANDLE)
16973 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16978 return createInfo.
pool->m_BlockVector.Allocate(
16979 m_CurrentFrameIndex.load(),
16981 vkMemReq.alignment,
// Default path: walk candidate memory types from vkMemReq.memoryTypeBits.
16990 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
16991 uint32_t memTypeIndex = UINT32_MAX;
16993 if(res == VK_SUCCESS)
16995 res = AllocateMemoryOfType(
16997 vkMemReq.alignment,
16998 requiresDedicatedAllocation || prefersDedicatedAllocation,
17000 dedicatedBufferUsage,
17008 if(res == VK_SUCCESS)
// This memory type failed: exclude it and try the next best match.
17018 memoryTypeBits &= ~(1u << memTypeIndex);
17021 if(res == VK_SUCCESS)
17023 res = AllocateMemoryOfType(
17025 vkMemReq.alignment,
17026 requiresDedicatedAllocation || prefersDedicatedAllocation,
17028 dedicatedBufferUsage,
17036 if(res == VK_SUCCESS)
// No suitable memory type succeeded.
17046 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an array of allocations in reverse order, dispatching by allocation
// type: block allocations return to their owning block vector (custom pool
// or default per-type vector); dedicated allocations go through
// FreeDedicatedMemory. Budget and user data are cleared afterwards.
17057 void VmaAllocator_T::FreeMemory(
17058 size_t allocationCount,
17061 VMA_ASSERT(pAllocations);
// Reverse iteration over the array.
17063 for(
size_t allocIndex = allocationCount; allocIndex--; )
17067 if(allocation != VK_NULL_HANDLE)
// TouchAllocation returns false for lost allocations; they need no free.
17069 if(TouchAllocation(allocation))
17071 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
17073 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
17076 switch(allocation->GetType())
17078 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17080 VmaBlockVector* pBlockVector = VMA_NULL;
17081 VmaPool hPool = allocation->GetBlock()->GetParentPool();
17082 if(hPool != VK_NULL_HANDLE)
17084 pBlockVector = &hPool->m_BlockVector;
17088 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17089 pBlockVector = m_pBlockVectors[memTypeIndex];
17091 pBlockVector->Free(allocation);
17094 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17095 FreeDedicatedMemory(allocation);
// Common teardown for all allocation types.
17103 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
17104 allocation->SetUserData(
this, VMA_NULL);
17105 m_AllocationObjectAllocator.Free(allocation);
// Aggregates statistics across default block vectors, custom pools, and
// dedicated allocations, then post-processes totals per memory type/heap.
17110 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Initialize all stat buckets to empty.
17113 InitStatInfo(pStats->
total);
17114 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
17116 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default (per-memory-type) block vectors.
17120 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17122 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
17123 VMA_ASSERT(pBlockVector);
17124 pBlockVector->AddStats(pStats);
// Custom pools, under the pools mutex.
17129 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
17130 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
17132 pool->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type under the per-type mutex.
17137 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17139 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
17140 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17141 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
17143 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
17146 alloc->DedicatedAllocCalcStatsInfo(allocationStatInfo);
17147 VmaAddStatInfo(pStats->
total, allocationStatInfo);
17148 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
17149 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/min/max after summation.
17154 VmaPostprocessCalcStatInfo(pStats->
total);
17155 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
17156 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
17157 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
17158 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// Fills outBudget[0..heapCount) for heaps starting at firstHeap. With
// VK_EXT_memory_budget enabled, serves cached driver values (refetching via
// UpdateVulkanBudget after 30+ allocation operations); otherwise estimates
// the budget as 8/10 of each heap's size.
17161 void VmaAllocator_T::GetBudget(
VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
17163 #if VMA_MEMORY_BUDGET
17164 if(m_UseExtMemoryBudget)
// Cached driver budget is considered fresh for < 30 operations.
17166 if(m_Budget.m_OperationsSinceBudgetFetch < 30)
17168 VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
17169 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
17171 const uint32_t heapIndex = firstHeap + i;
17173 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
// Adjust driver-reported usage by blocks created/destroyed since the fetch.
17176 if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->
blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
17178 outBudget->
usage = m_Budget.m_VulkanUsage[heapIndex] +
17179 outBudget->
blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
17183 outBudget->
usage = 0;
// Never report a budget larger than the heap itself.
17187 outBudget->
budget = VMA_MIN(
17188 m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
// Stale cache: refetch from the driver, then retry (recursive call).
17193 UpdateVulkanBudget();
17194 GetBudget(outBudget, firstHeap, heapCount);
// Fallback without the extension: estimate from tracked block bytes.
17200 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
17202 const uint32_t heapIndex = firstHeap + i;
17204 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
17208 outBudget->
budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
// PCI vendor ID of AMD (0x1002), used for vendor-specific heuristics.
17213 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Starts a defragmentation run: creates a context, registers the requested
// allocations, and performs the initial pass. The context is only kept
// alive (returned to the caller) when the result is VK_NOT_READY.
// NOTE(review): interior lines are missing from this extract.
17215 VkResult VmaAllocator_T::DefragmentationBegin(
17225 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
17226 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
17229 (*pContext)->AddAllocations(
17232 VkResult res = (*pContext)->Defragment(
// Anything other than VK_NOT_READY means the run is finished: destroy it.
17237 if(res != VK_NOT_READY)
17239 vma_delete(
this, *pContext);
17240 *pContext = VMA_NULL;
// Finishes a defragmentation run by destroying its context object.
17246 VkResult VmaAllocator_T::DefragmentationEnd(
17249 vma_delete(
this, context);
// Thin forwarder to the context's incremental-pass begin.
17253 VkResult VmaAllocator_T::DefragmentationPassBegin(
17257 return context->DefragmentPassBegin(pInfo);
// Thin forwarder to the context's incremental-pass end.
17259 VkResult VmaAllocator_T::DefragmentationPassEnd(
17262 return context->DefragmentPassEnd();
// NOTE(review): the signature line of this function was dropped by the
// extraction; from the body this is VmaAllocator_T::GetAllocationInfo
// (fills *pAllocationInfo from hAllocation) — confirm against upstream.
// For allocations that can become lost, it uses a compare-exchange loop on
// the last-use frame index to atomically "touch" the allocation while
// reading its parameters; lost allocations report null memory/zero offset.
17268 if(hAllocation->CanBecomeLost())
17274 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17275 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Allocation already lost: report placeholder info.
17278 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
17282 pAllocationInfo->
offset = 0;
17283 pAllocationInfo->
size = hAllocation->GetSize();
17285 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: parameters are stable, read them directly.
17288 else if(localLastUseFrameIndex == localCurrFrameIndex)
17290 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
17291 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
17292 pAllocationInfo->
offset = hAllocation->GetOffset();
17293 pAllocationInfo->
size = hAllocation->GetSize();
17295 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to claim the current frame index and loop.
17300 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17302 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocation path; frame-index bookkeeping only kept for stats.
17309 #if VMA_STATS_STRING_ENABLED
17310 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17311 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17314 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
17315 if(localLastUseFrameIndex == localCurrFrameIndex)
17321 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17323 localLastUseFrameIndex = localCurrFrameIndex;
17329 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
17330 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
17331 pAllocationInfo->
offset = hAllocation->GetOffset();
17332 pAllocationInfo->
size = hAllocation->GetSize();
17333 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
17334 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. Returns false when the
// allocation has become lost; uses a compare-exchange loop on the last-use
// frame index for lost-capable allocations.
17338 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
17341 if(hAllocation->CanBecomeLost())
17343 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17344 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost -> cannot be touched.
17347 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
17351 else if(localLastUseFrameIndex == localCurrFrameIndex)
17357 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17359 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: frame-index update kept only for statistics builds.
17366 #if VMA_STATS_STRING_ENABLED
17367 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17368 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17371 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
17372 if(localLastUseFrameIndex == localCurrFrameIndex)
17378 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17380 localLastUseFrameIndex = localCurrFrameIndex;
// NOTE(review): the signature line was dropped by the extraction; from the
// body this is VmaAllocator_T::CreatePool — confirm against upstream.
// Validates the create info against the allocator's global memory type
// mask, constructs the VmaPool_T with a computed preferred block size,
// creates its minimum blocks, and registers it in the pool list.
17392 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
17408 return VK_ERROR_INITIALIZATION_FAILED;
// Reject memory types excluded by the global memory type bits.
17412 ((1u << pCreateInfo->
memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
17414 return VK_ERROR_FEATURE_NOT_PRESENT;
17421 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
17423 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
17425 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
17426 if(res != VK_SUCCESS)
17428 vma_delete(
this, *pPool);
// Register the pool under the pools mutex and assign a unique id.
17435 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
17436 (*pPool)->SetId(m_NextPoolId++);
17437 m_Pools.PushBack(*pPool);
// Unregisters the pool from the allocator's pool list and destroys it.
17443 void VmaAllocator_T::DestroyPool(
VmaPool pool)
// Remove from the list under the pools mutex before deletion.
17447 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
17448 m_Pools.Remove(pool);
17451 vma_delete(
this, pool);
// NOTE(review): signature dropped by extraction — presumably
// VmaAllocator_T::GetPoolStats, forwarding to the pool's block vector.
17456 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Stores the application's current frame index and, when
// VK_EXT_memory_budget is in use, refreshes the cached budget.
17459 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
17461 m_CurrentFrameIndex.store(frameIndex);
17463 #if VMA_MEMORY_BUDGET
17464 if(m_UseExtMemoryBudget)
17466 UpdateVulkanBudget();
// Forwards to the pool's block vector to mark stale allocations as lost,
// reporting the count through pLostAllocationCount.
17471 void VmaAllocator_T::MakePoolAllocationsLost(
17473 size_t* pLostAllocationCount)
17475 hPool->m_BlockVector.MakePoolAllocationsLost(
17476 m_CurrentFrameIndex.load(),
17477 pLostAllocationCount);
// Thin forwarder: validates corruption-detection margins of one pool.
17480 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
17482 return hPool->m_BlockVector.CheckCorruption();
// Checks corruption across all default block vectors and custom pools whose
// memory type is in memoryTypeBits. Starts from VK_ERROR_FEATURE_NOT_PRESENT
// and upgrades to VK_SUCCESS once any vector actually supports the check.
17485 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
17487 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default per-memory-type block vectors.
17490 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17492 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
17494 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
17495 VMA_ASSERT(pBlockVector);
17496 VkResult localRes = pBlockVector->CheckCorruption();
17499 case VK_ERROR_FEATURE_NOT_PRESENT:
17502 finalRes = VK_SUCCESS;
// Custom pools, under the pools mutex.
17512 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
17513 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
17515 if(((1u << pool->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
17517 VkResult localRes = pool->m_BlockVector.CheckCorruption();
17520 case VK_ERROR_FEATURE_NOT_PRESENT:
17523 finalRes = VK_SUCCESS;
// Creates a placeholder allocation that is permanently in the "lost" state.
17535 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
17537 *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST,
false);
17538 (*pAllocation)->InitLost();
// RAII-style transactional increment of an atomic counter: Increment()
// bumps the counter and remembers it; unless Commit() is called (which
// clears m_Atomic), the destructor rolls the increment back.
// NOTE(review): destructor body and Commit() signature lines are missing
// from this extract.
17542 template<
typename T>
17543 struct AtomicTransactionalIncrement
17546 typedef std::atomic<T> AtomicT;
// Destructor: decrements m_Atomic when still set (rollback); body elided.
17547 ~AtomicTransactionalIncrement()
// Returns the pre-increment value.
17552 T Increment(AtomicT* atomic)
17555 return m_Atomic->fetch_add(1);
// Commit(): keep the increment by forgetting the counter.
17559 m_Atomic =
nullptr;
17563 AtomicT* m_Atomic =
nullptr;
// Central vkAllocateMemory wrapper: enforces maxMemoryAllocationCount (debug
// option), enforces optional per-heap size limits with a CAS loop on the
// tracked block bytes, invokes the user allocation callback, and commits
// the device-memory count only on success.
17566 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
// Tentatively count this allocation; rolled back unless Commit() is called.
17568 AtomicTransactionalIncrement<uint32_t> deviceMemoryCountIncrement;
17569 const uint64_t prevDeviceMemoryCount = deviceMemoryCountIncrement.Increment(&m_DeviceMemoryCount);
17570 #if VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
17571 if(prevDeviceMemoryCount >= m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount)
17573 return VK_ERROR_TOO_MANY_OBJECTS;
17577 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Heap has a user-imposed size limit: reserve bytes with compare-exchange.
17580 if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
17582 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
17583 VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
17586 const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
17587 if(blockBytesAfterAllocation > heapSize)
17589 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
17591 if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
// No limit on this heap: plain atomic add.
17599 m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
// VULKAN CALL vkAllocateMemory.
17603 VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
17605 if(res == VK_SUCCESS)
17607 #if VMA_MEMORY_BUDGET
17608 ++m_Budget.m_OperationsSinceBudgetFetch;
// Informative callback to the user.
17612 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
17614 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.
pUserData);
17617 deviceMemoryCountIncrement.Commit();
// Failure: release the bytes reserved above.
17621 m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
17627 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
17630 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
17632 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.
pUserData);
17636 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
17638 m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
17640 --m_DeviceMemoryCount;
// Binds a buffer to device memory. A non-null pNext chain requires
// vkBindBufferMemory2KHR (Vulkan 1.1 or VK_KHR_bind_memory2); otherwise
// falls back to plain vkBindBufferMemory.
17643 VkResult VmaAllocator_T::BindVulkanBuffer(
17644 VkDeviceMemory memory,
17645 VkDeviceSize memoryOffset,
17649 if(pNext != VMA_NULL)
17651 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17652 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17653 m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
17655 VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
17656 bindBufferMemoryInfo.pNext = pNext;
17657 bindBufferMemoryInfo.buffer = buffer;
17658 bindBufferMemoryInfo.memory = memory;
17659 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17660 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
// pNext chain given but bind_memory2 unavailable: cannot honor it.
17665 return VK_ERROR_EXTENSION_NOT_PRESENT;
17670 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
// Image counterpart of BindVulkanBuffer: uses vkBindImageMemory2KHR when a
// pNext chain is supplied and the feature is available, else the 1.0 call.
17674 VkResult VmaAllocator_T::BindVulkanImage(
17675 VkDeviceMemory memory,
17676 VkDeviceSize memoryOffset,
17680 if(pNext != VMA_NULL)
17682 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17683 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17684 m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
17686 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
17687 bindBufferMemoryInfo.pNext = pNext;
17688 bindBufferMemoryInfo.image = image;
17689 bindBufferMemoryInfo.memory = memory;
17690 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17691 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
// pNext chain given but bind_memory2 unavailable: cannot honor it.
17696 return VK_ERROR_EXTENSION_NOT_PRESENT;
17701 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
// Maps the allocation's memory and returns a pointer adjusted by the
// allocation's offset within its block. Lost-capable allocations cannot be
// mapped. Block allocations map through the shared block (ref-counted);
// dedicated allocations map their own VkDeviceMemory.
17705 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
17707 if(hAllocation->CanBecomeLost())
17709 return VK_ERROR_MEMORY_MAP_FAILED;
17712 switch(hAllocation->GetType())
17714 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17716 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17717 char *pBytes = VMA_NULL;
17718 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
17719 if(res == VK_SUCCESS)
// Offset the block-wide mapping to this allocation's start.
17721 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
17722 hAllocation->BlockAllocMap();
17726 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17727 return hAllocation->DedicatedAllocMap(
this, ppData);
17730 return VK_ERROR_MEMORY_MAP_FAILED;
// NOTE(review): signature dropped by extraction — from the body this is
// VmaAllocator_T::Unmap, the inverse of Map(): decrements the mapping
// ref-count on the owning block or unmaps the dedicated memory.
17736 switch(hAllocation->GetType())
17738 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17740 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17741 hAllocation->BlockAllocUnmap();
17742 pBlock->Unmap(
this, 1);
17745 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17746 hAllocation->DedicatedAllocUnmap(
this);
// Binds hBuffer to the allocation's memory at allocationLocalOffset,
// dispatching to the dedicated-memory or block path.
17753 VkResult VmaAllocator_T::BindBufferMemory(
17755 VkDeviceSize allocationLocalOffset,
17759 VkResult res = VK_SUCCESS;
17760 switch(hAllocation->GetType())
17762 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17763 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
17765 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17767 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17768 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
// Block path adds the allocation's offset within the block.
17769 res = pBlock->BindBufferMemory(
this, hAllocation, allocationLocalOffset, hBuffer, pNext);
// Image counterpart of BindBufferMemory: binds hImage to the allocation's
// memory at allocationLocalOffset via the dedicated or block path.
17778 VkResult VmaAllocator_T::BindImageMemory(
17780 VkDeviceSize allocationLocalOffset,
17784 VkResult res = VK_SUCCESS;
17785 switch(hAllocation->GetType())
17787 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17788 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
17790 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17792 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
17793 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
17794 res = pBlock->BindImageMemory(
this, hAllocation, allocationLocalOffset, hImage, pNext);
// Flushes or invalidates one mapped range. GetFlushOrInvalidateRange
// returns false when no call is needed (e.g. host-coherent memory), in
// which case VK_SUCCESS is returned without touching Vulkan.
17803 VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
17805 VkDeviceSize offset, VkDeviceSize size,
17806 VMA_CACHE_OPERATION op)
17808 VkResult res = VK_SUCCESS;
17810 VkMappedMemoryRange memRange = {};
17811 if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
17815 case VMA_CACHE_FLUSH:
17816 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
17818 case VMA_CACHE_INVALIDATE:
17819 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Batched variant: collects the required VkMappedMemoryRanges for all
// allocations (skipping those needing no cache maintenance) and issues a
// single flush/invalidate call. Null offsets/sizes arrays default each
// entry to 0 / VK_WHOLE_SIZE.
17829 VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
17830 uint32_t allocationCount,
17832 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
17833 VMA_CACHE_OPERATION op)
17835 typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
17836 typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
17837 RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
17839 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
17842 const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
17843 const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
17844 VkMappedMemoryRange newRange;
17845 if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
17847 ranges.push_back(newRange);
17851 VkResult res = VK_SUCCESS;
17852 if(!ranges.empty())
17856 case VMA_CACHE_FLUSH:
17857 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
17859 case VMA_CACHE_INVALIDATE:
17860 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
// Frees a dedicated allocation: removes it from the per-type linked list
// under the mutex, then releases the underlying VkDeviceMemory.
// NOTE(review): lines between obtaining hMemory and freeing it are missing
// from this extract.
17870 void VmaAllocator_T::FreeDedicatedMemory(
const VmaAllocation allocation)
17872 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
17874 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17876 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17877 DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
17878 dedicatedAllocations.Remove(allocation);
17881 VkDeviceMemory hMemory = allocation->GetMemory();
17893 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
17895 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Determines which memory types can hold the staging buffer used by GPU
// defragmentation, by creating a throwaway buffer and reading its
// memoryTypeBits. Returns 0 when the dummy buffer cannot be created.
17898 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const
17900 VkBufferCreateInfo dummyBufCreateInfo;
17901 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
17903 uint32_t memoryTypeBits = 0;
// Create a dummy buffer only to query its memory requirements.
17906 VkBuffer buf = VK_NULL_HANDLE;
17907 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
17908 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
17909 if(res == VK_SUCCESS)
17912 VkMemoryRequirements memReq;
17913 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
17914 memoryTypeBits = memReq.memoryTypeBits;
17917 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
17920 return memoryTypeBits;
17923 uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits()
const
17926 VMA_ASSERT(GetMemoryTypeCount() > 0);
17928 uint32_t memoryTypeBits = UINT32_MAX;
17930 if(!m_UseAmdDeviceCoherentMemory)
17933 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17935 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
17937 memoryTypeBits &= ~(1u << memTypeIndex);
17942 return memoryTypeBits;
// Computes the VkMappedMemoryRange for a flush/invalidate of the given
// sub-range, aligning offset/size to nonCoherentAtomSize and clamping to
// the allocation (dedicated) or block (suballocation) bounds. Returns true
// when a Vulkan call is needed; visible code only builds a range for
// non-coherent memory types with size > 0.
17945 bool VmaAllocator_T::GetFlushOrInvalidateRange(
17947 VkDeviceSize offset, VkDeviceSize size,
17948 VkMappedMemoryRange& outRange)
const
17950 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17951 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
17953 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
17954 const VkDeviceSize allocationSize = allocation->GetSize();
17955 VMA_ASSERT(offset <= allocationSize);
17957 outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
17958 outRange.pNext = VMA_NULL;
17959 outRange.memory = allocation->GetMemory();
17961 switch(allocation->GetType())
17963 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Dedicated: align down the start, clamp size to the allocation end.
17964 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
17965 if(size == VK_WHOLE_SIZE)
17967 outRange.size = allocationSize - outRange.offset;
17971 VMA_ASSERT(offset + size <= allocationSize);
17972 outRange.size = VMA_MIN(
17973 VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
17974 allocationSize - outRange.offset);
17977 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Block suballocation: first compute the range relative to the allocation.
17980 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
17981 if(size == VK_WHOLE_SIZE)
17983 size = allocationSize - offset;
17987 VMA_ASSERT(offset + size <= allocationSize);
17989 outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);
// Then translate into block coordinates and clamp to the block size.
17992 const VkDeviceSize allocationOffset = allocation->GetOffset();
17993 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
17994 const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
17995 outRange.offset += allocationOffset;
17996 outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
18008 #if VMA_MEMORY_BUDGET
// Refetches heap usage/budget from the driver via
// vkGetPhysicalDeviceMemoryProperties2KHR + VK_EXT_memory_budget, then
// sanitizes the values and resets the staleness counter.
18010 void VmaAllocator_T::UpdateVulkanBudget()
18012 VMA_ASSERT(m_UseExtMemoryBudget);
18014 VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };
18016 VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
18017 VmaPnextChainPushFront(&memProps, &budgetProps);
18019 GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
18022 VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
18024 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
18026 m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
18027 m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
18028 m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
// Guard against drivers reporting zero or oversized budgets.
18031 if(m_Budget.m_VulkanBudget[heapIndex] == 0)
18033 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
18035 else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
18037 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
// Some drivers report zero usage despite live blocks: fall back to our count.
18039 if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
18041 m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
18044 m_Budget.m_OperationsSinceBudgetFetch = 0;
// Debug helper: fills host-visible, non-lost allocations with a byte
// pattern (created/destroyed markers), flushing afterwards so the write is
// visible on non-coherent memory. Active only when
// VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled.
18050 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
18052 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
18053 !hAllocation->CanBecomeLost() &&
18054 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
18056 void* pData = VMA_NULL;
18057 VkResult res = Map(hAllocation, &pData);
18058 if(res == VK_SUCCESS)
18060 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
18061 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
18062 Unmap(hAllocation);
18066 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
18071 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
18073 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
18074 if(memoryTypeBits == UINT32_MAX)
18076 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
18077 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
18079 return memoryTypeBits;
18082 #if VMA_STATS_STRING_ENABLED
18084 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
18086 bool dedicatedAllocationsStarted =
false;
18087 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
18089 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
18090 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
18091 if(!dedicatedAllocList.IsEmpty())
18093 if(dedicatedAllocationsStarted ==
false)
18095 dedicatedAllocationsStarted =
true;
18096 json.WriteString(
"DedicatedAllocations");
18097 json.BeginObject();
18100 json.BeginString(
"Type ");
18101 json.ContinueString(memTypeIndex);
18107 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
18109 json.BeginObject(
true);
18110 alloc->PrintParameters(json);
18117 if(dedicatedAllocationsStarted)
18123 bool allocationsStarted =
false;
18124 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
18126 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
18128 if(allocationsStarted ==
false)
18130 allocationsStarted =
true;
18131 json.WriteString(
"DefaultPools");
18132 json.BeginObject();
18135 json.BeginString(
"Type ");
18136 json.ContinueString(memTypeIndex);
18139 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
18142 if(allocationsStarted)
18150 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
18151 if(!m_Pools.IsEmpty())
18153 json.WriteString(
"Pools");
18154 json.BeginObject();
18155 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
18157 json.BeginString();
18158 json.ContinueString(pool->GetId());
18161 pool->m_BlockVector.PrintDetailedMap(json);
18177 VMA_ASSERT(pCreateInfo && pAllocator);
18180 VMA_DEBUG_LOG(
"vmaCreateAllocator");
18182 return (*pAllocator)->Init(pCreateInfo);
18188 if(allocator != VK_NULL_HANDLE)
18190 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
18191 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
18192 vma_delete(&allocationCallbacks, allocator);
18198 VMA_ASSERT(allocator && pAllocatorInfo);
18199 pAllocatorInfo->
instance = allocator->m_hInstance;
18200 pAllocatorInfo->
physicalDevice = allocator->GetPhysicalDevice();
18201 pAllocatorInfo->
device = allocator->m_hDevice;
18206 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
18208 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
18209 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
18214 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
18216 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
18217 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
18222 uint32_t memoryTypeIndex,
18223 VkMemoryPropertyFlags* pFlags)
18225 VMA_ASSERT(allocator && pFlags);
18226 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
18227 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
18232 uint32_t frameIndex)
18234 VMA_ASSERT(allocator);
18235 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
18237 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18239 allocator->SetCurrentFrameIndex(frameIndex);
18246 VMA_ASSERT(allocator && pStats);
18247 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18248 allocator->CalculateStats(pStats);
18255 VMA_ASSERT(allocator && pBudget);
18256 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18257 allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
18260 #if VMA_STATS_STRING_ENABLED
18264 char** ppStatsString,
18265 VkBool32 detailedMap)
18267 VMA_ASSERT(allocator && ppStatsString);
18268 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18270 VmaStringBuilder sb(allocator);
18272 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
18273 json.BeginObject();
18276 allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());
18279 allocator->CalculateStats(&stats);
18281 json.WriteString(
"Total");
18282 VmaPrintStatInfo(json, stats.
total);
18284 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
18286 json.BeginString(
"Heap ");
18287 json.ContinueString(heapIndex);
18289 json.BeginObject();
18291 json.WriteString(
"Size");
18292 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
18294 json.WriteString(
"Flags");
18295 json.BeginArray(
true);
18296 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
18298 json.WriteString(
"DEVICE_LOCAL");
18302 json.WriteString(
"Budget");
18303 json.BeginObject();
18305 json.WriteString(
"BlockBytes");
18306 json.WriteNumber(budget[heapIndex].blockBytes);
18307 json.WriteString(
"AllocationBytes");
18308 json.WriteNumber(budget[heapIndex].allocationBytes);
18309 json.WriteString(
"Usage");
18310 json.WriteNumber(budget[heapIndex].usage);
18311 json.WriteString(
"Budget");
18312 json.WriteNumber(budget[heapIndex].budget);
18318 json.WriteString(
"Stats");
18319 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
18322 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
18324 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
18326 json.BeginString(
"Type ");
18327 json.ContinueString(typeIndex);
18330 json.BeginObject();
18332 json.WriteString(
"Flags");
18333 json.BeginArray(
true);
18334 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
18335 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
18337 json.WriteString(
"DEVICE_LOCAL");
18339 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
18341 json.WriteString(
"HOST_VISIBLE");
18343 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
18345 json.WriteString(
"HOST_COHERENT");
18347 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
18349 json.WriteString(
"HOST_CACHED");
18351 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
18353 json.WriteString(
"LAZILY_ALLOCATED");
18355 #if VMA_VULKAN_VERSION >= 1001000
18356 if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
18358 json.WriteString(
"PROTECTED");
18361 #if VK_AMD_device_coherent_memory
18362 if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
18364 json.WriteString(
"DEVICE_COHERENT");
18366 if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
18368 json.WriteString(
"DEVICE_UNCACHED");
18375 json.WriteString(
"Stats");
18376 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
18385 if(detailedMap == VK_TRUE)
18387 allocator->PrintDetailedMap(json);
18393 const size_t len = sb.GetLength();
18394 char*
const pChars = vma_new_array(allocator,
char, len + 1);
18397 memcpy(pChars, sb.GetData(), len);
18399 pChars[len] =
'\0';
18400 *ppStatsString = pChars;
18405 char* pStatsString)
18407 if(pStatsString != VMA_NULL)
18409 VMA_ASSERT(allocator);
18410 size_t len = strlen(pStatsString);
18411 vma_delete_array(allocator, pStatsString, len + 1);
18422 uint32_t memoryTypeBits,
18424 uint32_t* pMemoryTypeIndex)
18426 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18427 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18428 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18430 memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();
18437 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
18438 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
18439 uint32_t notPreferredFlags = 0;
18442 switch(pAllocationCreateInfo->
usage)
18447 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18449 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18453 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
18456 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18457 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18459 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18463 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18464 preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
18467 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18470 requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
18479 (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
18481 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
18484 *pMemoryTypeIndex = UINT32_MAX;
18485 uint32_t minCost = UINT32_MAX;
18486 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
18487 memTypeIndex < allocator->GetMemoryTypeCount();
18488 ++memTypeIndex, memTypeBit <<= 1)
18491 if((memTypeBit & memoryTypeBits) != 0)
18493 const VkMemoryPropertyFlags currFlags =
18494 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
18496 if((requiredFlags & ~currFlags) == 0)
18499 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
18500 VmaCountBitsSet(currFlags & notPreferredFlags);
18502 if(currCost < minCost)
18504 *pMemoryTypeIndex = memTypeIndex;
18509 minCost = currCost;
18514 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
18519 const VkBufferCreateInfo* pBufferCreateInfo,
18521 uint32_t* pMemoryTypeIndex)
18523 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18524 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
18525 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18526 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18528 const VkDevice hDev = allocator->m_hDevice;
18529 VkBuffer hBuffer = VK_NULL_HANDLE;
18530 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
18531 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
18532 if(res == VK_SUCCESS)
18534 VkMemoryRequirements memReq = {};
18535 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
18536 hDev, hBuffer, &memReq);
18540 memReq.memoryTypeBits,
18541 pAllocationCreateInfo,
18544 allocator->GetVulkanFunctions().vkDestroyBuffer(
18545 hDev, hBuffer, allocator->GetAllocationCallbacks());
18552 const VkImageCreateInfo* pImageCreateInfo,
18554 uint32_t* pMemoryTypeIndex)
18556 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18557 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
18558 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18559 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18561 const VkDevice hDev = allocator->m_hDevice;
18562 VkImage hImage = VK_NULL_HANDLE;
18563 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
18564 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
18565 if(res == VK_SUCCESS)
18567 VkMemoryRequirements memReq = {};
18568 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
18569 hDev, hImage, &memReq);
18573 memReq.memoryTypeBits,
18574 pAllocationCreateInfo,
18577 allocator->GetVulkanFunctions().vkDestroyImage(
18578 hDev, hImage, allocator->GetAllocationCallbacks());
18588 VMA_ASSERT(allocator && pCreateInfo && pPool);
18590 VMA_DEBUG_LOG(
"vmaCreatePool");
18592 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18594 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
18596 #if VMA_RECORDING_ENABLED
18597 if(allocator->GetRecorder() != VMA_NULL)
18599 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
18610 VMA_ASSERT(allocator);
18612 if(pool == VK_NULL_HANDLE)
18617 VMA_DEBUG_LOG(
"vmaDestroyPool");
18619 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18621 #if VMA_RECORDING_ENABLED
18622 if(allocator->GetRecorder() != VMA_NULL)
18624 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
18628 allocator->DestroyPool(pool);
18636 VMA_ASSERT(allocator && pool && pPoolStats);
18638 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18640 allocator->GetPoolStats(pool, pPoolStats);
18646 size_t* pLostAllocationCount)
18648 VMA_ASSERT(allocator && pool);
18650 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18652 #if VMA_RECORDING_ENABLED
18653 if(allocator->GetRecorder() != VMA_NULL)
18655 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
18659 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
18664 VMA_ASSERT(allocator && pool);
18666 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18668 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
18670 return allocator->CheckPoolCorruption(pool);
18676 const char** ppName)
18678 VMA_ASSERT(allocator && pool && ppName);
18680 VMA_DEBUG_LOG(
"vmaGetPoolName");
18682 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18684 *ppName = pool->GetName();
18692 VMA_ASSERT(allocator && pool);
18694 VMA_DEBUG_LOG(
"vmaSetPoolName");
18696 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18698 pool->SetName(pName);
18700 #if VMA_RECORDING_ENABLED
18701 if(allocator->GetRecorder() != VMA_NULL)
18703 allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
18710 const VkMemoryRequirements* pVkMemoryRequirements,
18715 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
18717 VMA_DEBUG_LOG(
"vmaAllocateMemory");
18719 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18721 VkResult result = allocator->AllocateMemory(
18722 *pVkMemoryRequirements,
18729 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18733 #if VMA_RECORDING_ENABLED
18734 if(allocator->GetRecorder() != VMA_NULL)
18736 allocator->GetRecorder()->RecordAllocateMemory(
18737 allocator->GetCurrentFrameIndex(),
18738 *pVkMemoryRequirements,
18744 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18746 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18754 const VkMemoryRequirements* pVkMemoryRequirements,
18756 size_t allocationCount,
18760 if(allocationCount == 0)
18765 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
18767 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
18769 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18771 VkResult result = allocator->AllocateMemory(
18772 *pVkMemoryRequirements,
18779 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18783 #if VMA_RECORDING_ENABLED
18784 if(allocator->GetRecorder() != VMA_NULL)
18786 allocator->GetRecorder()->RecordAllocateMemoryPages(
18787 allocator->GetCurrentFrameIndex(),
18788 *pVkMemoryRequirements,
18790 (uint64_t)allocationCount,
18795 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18797 for(
size_t i = 0; i < allocationCount; ++i)
18799 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
18813 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18815 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
18817 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18819 VkMemoryRequirements vkMemReq = {};
18820 bool requiresDedicatedAllocation =
false;
18821 bool prefersDedicatedAllocation =
false;
18822 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
18823 requiresDedicatedAllocation,
18824 prefersDedicatedAllocation);
18826 VkResult result = allocator->AllocateMemory(
18828 requiresDedicatedAllocation,
18829 prefersDedicatedAllocation,
18834 VMA_SUBALLOCATION_TYPE_BUFFER,
18838 #if VMA_RECORDING_ENABLED
18839 if(allocator->GetRecorder() != VMA_NULL)
18841 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
18842 allocator->GetCurrentFrameIndex(),
18844 requiresDedicatedAllocation,
18845 prefersDedicatedAllocation,
18851 if(pAllocationInfo && result == VK_SUCCESS)
18853 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18866 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18868 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
18870 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18872 VkMemoryRequirements vkMemReq = {};
18873 bool requiresDedicatedAllocation =
false;
18874 bool prefersDedicatedAllocation =
false;
18875 allocator->GetImageMemoryRequirements(image, vkMemReq,
18876 requiresDedicatedAllocation, prefersDedicatedAllocation);
18878 VkResult result = allocator->AllocateMemory(
18880 requiresDedicatedAllocation,
18881 prefersDedicatedAllocation,
18886 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
18890 #if VMA_RECORDING_ENABLED
18891 if(allocator->GetRecorder() != VMA_NULL)
18893 allocator->GetRecorder()->RecordAllocateMemoryForImage(
18894 allocator->GetCurrentFrameIndex(),
18896 requiresDedicatedAllocation,
18897 prefersDedicatedAllocation,
18903 if(pAllocationInfo && result == VK_SUCCESS)
18905 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18915 VMA_ASSERT(allocator);
18917 if(allocation == VK_NULL_HANDLE)
18922 VMA_DEBUG_LOG(
"vmaFreeMemory");
18924 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18926 #if VMA_RECORDING_ENABLED
18927 if(allocator->GetRecorder() != VMA_NULL)
18929 allocator->GetRecorder()->RecordFreeMemory(
18930 allocator->GetCurrentFrameIndex(),
18935 allocator->FreeMemory(
18942 size_t allocationCount,
18945 if(allocationCount == 0)
18950 VMA_ASSERT(allocator);
18952 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
18954 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18956 #if VMA_RECORDING_ENABLED
18957 if(allocator->GetRecorder() != VMA_NULL)
18959 allocator->GetRecorder()->RecordFreeMemoryPages(
18960 allocator->GetCurrentFrameIndex(),
18961 (uint64_t)allocationCount,
18966 allocator->FreeMemory(allocationCount, pAllocations);
18974 VMA_ASSERT(allocator && allocation && pAllocationInfo);
18976 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18978 #if VMA_RECORDING_ENABLED
18979 if(allocator->GetRecorder() != VMA_NULL)
18981 allocator->GetRecorder()->RecordGetAllocationInfo(
18982 allocator->GetCurrentFrameIndex(),
18987 allocator->GetAllocationInfo(allocation, pAllocationInfo);
18994 VMA_ASSERT(allocator && allocation);
18996 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18998 #if VMA_RECORDING_ENABLED
18999 if(allocator->GetRecorder() != VMA_NULL)
19001 allocator->GetRecorder()->RecordTouchAllocation(
19002 allocator->GetCurrentFrameIndex(),
19007 return allocator->TouchAllocation(allocation);
19015 VMA_ASSERT(allocator && allocation);
19017 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19019 allocation->SetUserData(allocator, pUserData);
19021 #if VMA_RECORDING_ENABLED
19022 if(allocator->GetRecorder() != VMA_NULL)
19024 allocator->GetRecorder()->RecordSetAllocationUserData(
19025 allocator->GetCurrentFrameIndex(),
19036 VMA_ASSERT(allocator && pAllocation);
19038 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
19040 allocator->CreateLostAllocation(pAllocation);
19042 #if VMA_RECORDING_ENABLED
19043 if(allocator->GetRecorder() != VMA_NULL)
19045 allocator->GetRecorder()->RecordCreateLostAllocation(
19046 allocator->GetCurrentFrameIndex(),
19057 VMA_ASSERT(allocator && allocation && ppData);
19059 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19061 VkResult res = allocator->Map(allocation, ppData);
19063 #if VMA_RECORDING_ENABLED
19064 if(allocator->GetRecorder() != VMA_NULL)
19066 allocator->GetRecorder()->RecordMapMemory(
19067 allocator->GetCurrentFrameIndex(),
19079 VMA_ASSERT(allocator && allocation);
19081 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19083 #if VMA_RECORDING_ENABLED
19084 if(allocator->GetRecorder() != VMA_NULL)
19086 allocator->GetRecorder()->RecordUnmapMemory(
19087 allocator->GetCurrentFrameIndex(),
19092 allocator->Unmap(allocation);
19097 VMA_ASSERT(allocator && allocation);
19099 VMA_DEBUG_LOG(
"vmaFlushAllocation");
19101 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19103 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
19105 #if VMA_RECORDING_ENABLED
19106 if(allocator->GetRecorder() != VMA_NULL)
19108 allocator->GetRecorder()->RecordFlushAllocation(
19109 allocator->GetCurrentFrameIndex(),
19110 allocation, offset, size);
19119 VMA_ASSERT(allocator && allocation);
19121 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
19123 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19125 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
19127 #if VMA_RECORDING_ENABLED
19128 if(allocator->GetRecorder() != VMA_NULL)
19130 allocator->GetRecorder()->RecordInvalidateAllocation(
19131 allocator->GetCurrentFrameIndex(),
19132 allocation, offset, size);
19141 uint32_t allocationCount,
19143 const VkDeviceSize* offsets,
19144 const VkDeviceSize* sizes)
19146 VMA_ASSERT(allocator);
19148 if(allocationCount == 0)
19153 VMA_ASSERT(allocations);
19155 VMA_DEBUG_LOG(
"vmaFlushAllocations");
19157 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19159 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);
19161 #if VMA_RECORDING_ENABLED
19162 if(allocator->GetRecorder() != VMA_NULL)
19173 uint32_t allocationCount,
19175 const VkDeviceSize* offsets,
19176 const VkDeviceSize* sizes)
19178 VMA_ASSERT(allocator);
19180 if(allocationCount == 0)
19185 VMA_ASSERT(allocations);
19187 VMA_DEBUG_LOG(
"vmaInvalidateAllocations");
19189 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19191 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);
19193 #if VMA_RECORDING_ENABLED
19194 if(allocator->GetRecorder() != VMA_NULL)
19205 VMA_ASSERT(allocator);
19207 VMA_DEBUG_LOG(
"vmaCheckCorruption");
19209 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19211 return allocator->CheckCorruption(memoryTypeBits);
19217 size_t allocationCount,
19218 VkBool32* pAllocationsChanged,
19228 if(pDefragmentationInfo != VMA_NULL)
19242 if(res == VK_NOT_READY)
19255 VMA_ASSERT(allocator && pInfo && pContext);
19266 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
19268 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
19270 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19272 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
19274 #if VMA_RECORDING_ENABLED
19275 if(allocator->GetRecorder() != VMA_NULL)
19277 allocator->GetRecorder()->RecordDefragmentationBegin(
19278 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
19289 VMA_ASSERT(allocator);
19291 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
19293 if(context != VK_NULL_HANDLE)
19295 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19297 #if VMA_RECORDING_ENABLED
19298 if(allocator->GetRecorder() != VMA_NULL)
19300 allocator->GetRecorder()->RecordDefragmentationEnd(
19301 allocator->GetCurrentFrameIndex(), context);
19305 return allocator->DefragmentationEnd(context);
19319 VMA_ASSERT(allocator);
19322 VMA_DEBUG_LOG(
"vmaBeginDefragmentationPass");
19324 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19326 if(context == VK_NULL_HANDLE)
19332 return allocator->DefragmentationPassBegin(pInfo, context);
19338 VMA_ASSERT(allocator);
19340 VMA_DEBUG_LOG(
"vmaEndDefragmentationPass");
19341 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19343 if(context == VK_NULL_HANDLE)
19346 return allocator->DefragmentationPassEnd(context);
19354 VMA_ASSERT(allocator && allocation && buffer);
19356 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
19358 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19360 return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
19366 VkDeviceSize allocationLocalOffset,
19370 VMA_ASSERT(allocator && allocation && buffer);
19372 VMA_DEBUG_LOG(
"vmaBindBufferMemory2");
19374 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19376 return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
19384 VMA_ASSERT(allocator && allocation && image);
19386 VMA_DEBUG_LOG(
"vmaBindImageMemory");
19388 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19390 return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
19396 VkDeviceSize allocationLocalOffset,
19400 VMA_ASSERT(allocator && allocation && image);
19402 VMA_DEBUG_LOG(
"vmaBindImageMemory2");
19404 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19406 return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
19411 const VkBufferCreateInfo* pBufferCreateInfo,
19417 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
19419 if(pBufferCreateInfo->size == 0)
19421 return VK_ERROR_VALIDATION_FAILED_EXT;
19423 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
19424 !allocator->m_UseKhrBufferDeviceAddress)
19426 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
19427 return VK_ERROR_VALIDATION_FAILED_EXT;
19430 VMA_DEBUG_LOG(
"vmaCreateBuffer");
19432 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19434 *pBuffer = VK_NULL_HANDLE;
19435 *pAllocation = VK_NULL_HANDLE;
19438 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
19439 allocator->m_hDevice,
19441 allocator->GetAllocationCallbacks(),
19446 VkMemoryRequirements vkMemReq = {};
19447 bool requiresDedicatedAllocation =
false;
19448 bool prefersDedicatedAllocation =
false;
19449 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
19450 requiresDedicatedAllocation, prefersDedicatedAllocation);
19453 res = allocator->AllocateMemory(
19455 requiresDedicatedAllocation,
19456 prefersDedicatedAllocation,
19458 pBufferCreateInfo->usage,
19460 *pAllocationCreateInfo,
19461 VMA_SUBALLOCATION_TYPE_BUFFER,
19465 #if VMA_RECORDING_ENABLED
19466 if(allocator->GetRecorder() != VMA_NULL)
19468 allocator->GetRecorder()->RecordCreateBuffer(
19469 allocator->GetCurrentFrameIndex(),
19470 *pBufferCreateInfo,
19471 *pAllocationCreateInfo,
19481 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
19486 #if VMA_STATS_STRING_ENABLED
19487 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
19489 if(pAllocationInfo != VMA_NULL)
19491 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19496 allocator->FreeMemory(
19499 *pAllocation = VK_NULL_HANDLE;
19500 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19501 *pBuffer = VK_NULL_HANDLE;
19504 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19505 *pBuffer = VK_NULL_HANDLE;
19513 const VkBufferCreateInfo* pBufferCreateInfo,
19515 VkDeviceSize minAlignment,
19520 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && VmaIsPow2(minAlignment) && pBuffer && pAllocation);
19522 if(pBufferCreateInfo->size == 0)
19524 return VK_ERROR_VALIDATION_FAILED_EXT;
19526 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
19527 !allocator->m_UseKhrBufferDeviceAddress)
19529 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
19530 return VK_ERROR_VALIDATION_FAILED_EXT;
19533 VMA_DEBUG_LOG(
"vmaCreateBufferWithAlignment");
19535 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19537 *pBuffer = VK_NULL_HANDLE;
19538 *pAllocation = VK_NULL_HANDLE;
19541 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
19542 allocator->m_hDevice,
19544 allocator->GetAllocationCallbacks(),
19549 VkMemoryRequirements vkMemReq = {};
19550 bool requiresDedicatedAllocation =
false;
19551 bool prefersDedicatedAllocation =
false;
19552 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
19553 requiresDedicatedAllocation, prefersDedicatedAllocation);
19556 vkMemReq.alignment = VMA_MAX(vkMemReq.alignment, minAlignment);
19559 res = allocator->AllocateMemory(
19561 requiresDedicatedAllocation,
19562 prefersDedicatedAllocation,
19564 pBufferCreateInfo->usage,
19566 *pAllocationCreateInfo,
19567 VMA_SUBALLOCATION_TYPE_BUFFER,
19571 #if VMA_RECORDING_ENABLED
19572 if(allocator->GetRecorder() != VMA_NULL)
19574 VMA_ASSERT(0 &&
"Not implemented.");
19583 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
19588 #if VMA_STATS_STRING_ENABLED
19589 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
19591 if(pAllocationInfo != VMA_NULL)
19593 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19598 allocator->FreeMemory(
19601 *pAllocation = VK_NULL_HANDLE;
19602 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19603 *pBuffer = VK_NULL_HANDLE;
19606 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19607 *pBuffer = VK_NULL_HANDLE;
19618 VMA_ASSERT(allocator);
19620 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19625 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
19627 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19629 #if VMA_RECORDING_ENABLED
19630 if(allocator->GetRecorder() != VMA_NULL)
19632 allocator->GetRecorder()->RecordDestroyBuffer(
19633 allocator->GetCurrentFrameIndex(),
19638 if(buffer != VK_NULL_HANDLE)
19640 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
19643 if(allocation != VK_NULL_HANDLE)
19645 allocator->FreeMemory(
19653 const VkImageCreateInfo* pImageCreateInfo,
19659 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
19661 if(pImageCreateInfo->extent.width == 0 ||
19662 pImageCreateInfo->extent.height == 0 ||
19663 pImageCreateInfo->extent.depth == 0 ||
19664 pImageCreateInfo->mipLevels == 0 ||
19665 pImageCreateInfo->arrayLayers == 0)
19667 return VK_ERROR_VALIDATION_FAILED_EXT;
19670 VMA_DEBUG_LOG(
"vmaCreateImage");
19672 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19674 *pImage = VK_NULL_HANDLE;
19675 *pAllocation = VK_NULL_HANDLE;
19678 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
19679 allocator->m_hDevice,
19681 allocator->GetAllocationCallbacks(),
19685 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
19686 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
19687 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
19690 VkMemoryRequirements vkMemReq = {};
19691 bool requiresDedicatedAllocation =
false;
19692 bool prefersDedicatedAllocation =
false;
19693 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
19694 requiresDedicatedAllocation, prefersDedicatedAllocation);
19696 res = allocator->AllocateMemory(
19698 requiresDedicatedAllocation,
19699 prefersDedicatedAllocation,
19703 *pAllocationCreateInfo,
19708 #if VMA_RECORDING_ENABLED
19709 if(allocator->GetRecorder() != VMA_NULL)
19711 allocator->GetRecorder()->RecordCreateImage(
19712 allocator->GetCurrentFrameIndex(),
19714 *pAllocationCreateInfo,
19724 res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
19729 #if VMA_STATS_STRING_ENABLED
19730 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
19732 if(pAllocationInfo != VMA_NULL)
19734 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19739 allocator->FreeMemory(
19742 *pAllocation = VK_NULL_HANDLE;
19743 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19744 *pImage = VK_NULL_HANDLE;
19747 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19748 *pImage = VK_NULL_HANDLE;
19759 VMA_ASSERT(allocator);
19761 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19766 VMA_DEBUG_LOG(
"vmaDestroyImage");
19768 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19770 #if VMA_RECORDING_ENABLED
19771 if(allocator->GetRecorder() != VMA_NULL)
19773 allocator->GetRecorder()->RecordDestroyImage(
19774 allocator->GetCurrentFrameIndex(),
19779 if(image != VK_NULL_HANDLE)
19781 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
19783 if(allocation != VK_NULL_HANDLE)
19785 allocator->FreeMemory(
Definition: vk_mem_alloc.h:2897
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2923
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2929
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2915
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2936
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2910
float priority
A floating-point value between 0 and 1, indicating the priority of the allocation relative to other m...
Definition: vk_mem_alloc.h:2943
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2905
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2899
Represents single memory allocation.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:3264
VkDeviceSize offset
Offset in VkDeviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:3288
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:3308
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:3269
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:3299
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:3313
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:3278
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:2419
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:2424
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2450
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:2475
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:2421
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null.
Definition: vk_mem_alloc.h:2481
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:2433
VkInstance instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:2493
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:2430
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:2488
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:2427
uint32_t vulkanApiVersion
Optional. The highest version of Vulkan that the application is designed to use.
Definition: vk_mem_alloc.h:2502
const VkExternalMemoryHandleTypeFlagsKHR * pTypeExternalMemoryHandleTypes
Either null or a pointer to an array of external memory handle types for each Vulkan memory type.
Definition: vk_mem_alloc.h:2513
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:2436
Represents main object of this library initialized.
Information about existing VmaAllocator object.
Definition: vk_mem_alloc.h:2529
VkDevice device
Handle to Vulkan device object.
Definition: vk_mem_alloc.h:2544
VkInstance instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:2534
VkPhysicalDevice physicalDevice
Handle to Vulkan physical device object.
Definition: vk_mem_alloc.h:2539
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
Definition: vk_mem_alloc.h:2635
VkDeviceSize blockBytes
Sum size of all VkDeviceMemory blocks allocated from particular heap, in bytes.
Definition: vk_mem_alloc.h:2638
VkDeviceSize allocationBytes
Sum size of all allocations created in particular heap, in bytes.
Definition: vk_mem_alloc.h:2649
VkDeviceSize usage
Estimated current memory usage of the program, in bytes.
Definition: vk_mem_alloc.h:2659
VkDeviceSize budget
Estimated amount of memory available to the program, in bytes.
Definition: vk_mem_alloc.h:2670
Represents Opaque object that represents started defragmentation process.
Parameters for defragmentation.
Definition: vk_mem_alloc.h:3663
const VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:3703
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:3669
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:3723
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:3718
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:3666
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:3684
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:3687
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:3732
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:3713
const VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:3678
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:3708
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:3754
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:3764
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:3759
Parameters for incremental defragmentation steps.
Definition: vk_mem_alloc.h:3745
uint32_t moveCount
Definition: vk_mem_alloc.h:3746
VmaDefragmentationPassMoveInfo * pMoves
Definition: vk_mem_alloc.h:3747
Definition: vk_mem_alloc.h:3735
VkDeviceMemory memory
Definition: vk_mem_alloc.h:3737
VkDeviceSize offset
Definition: vk_mem_alloc.h:3738
VmaAllocation allocation
Definition: vk_mem_alloc.h:3736
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:3768
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:3776
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:3770
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:3772
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:3774
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:2228
void * pUserData
Optional, can be null.
Definition: vk_mem_alloc.h:2234
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:2230
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:2232
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:3065
float priority
A floating-point value between 0 and 1, indicating the priority of the allocations in this pool relat...
Definition: vk_mem_alloc.h:3113
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:3068
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:3071
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:3107
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:3080
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:3085
VkDeviceSize minAllocationAlignment
Additional minimum alignment to be used for all allocations created from this pool....
Definition: vk_mem_alloc.h:3120
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:3093
void * pMemoryAllocateNext
Additional pNext chain to be attached to VkMemoryAllocateInfo used for every allocation made by this ...
Definition: vk_mem_alloc.h:3130
Represents custom memory pool.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:3135
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:3138
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:3157
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:3154
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:3144
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:3141
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:3147
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:2404
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:2414
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:2406
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:2596
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:2607
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:2607
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:2606
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:2608
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:2600
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:2608
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:2604
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:2598
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:2607
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:2602
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:2608
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2613
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:2615
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:2614
VmaStatInfo total
Definition: vk_mem_alloc.h:2616
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:2358
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:2368
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:2373
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:2361
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:2365
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:2370
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:2362
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:2369
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:2366
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:2360
PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:2359
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:2372
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:2374
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:2367
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:2363
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:2364
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:2375
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:2371
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size, void *pUserData)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:2214
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
struct VmaAllocatorInfo VmaAllocatorInfo
Information about existing VmaAllocator object.
VkResult vmaEndDefragmentationPass(VmaAllocator allocator, VmaDefragmentationContext context)
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:2026
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
struct VmaStats VmaStats
General statistics from current state of Allocator.
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:3061
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:2390
@ VMA_RECORD_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2398
@ VMA_RECORD_FLUSH_AFTER_CALL_BIT
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:2396
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:2238
@ VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT
Definition: vk_mem_alloc.h:2313
@ VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:2243
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT
Definition: vk_mem_alloc.h:2295
@ VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT
Definition: vk_mem_alloc.h:2331
@ VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT
Definition: vk_mem_alloc.h:2283
@ VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:2268
@ VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2350
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT
Definition: vk_mem_alloc.h:2348
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2894
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
void vmaFreeMemory(VmaAllocator allocator, const VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:3653
@ VMA_DEFRAGMENTATION_FLAG_INCREMENTAL
Definition: vk_mem_alloc.h:3654
@ VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:3655
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
struct VmaDefragmentationPassInfo VmaDefragmentationPassInfo
Parameters for incremental defragmentation steps.
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size, void *pUserData)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:2207
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, const VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:3657
VkResult vmaBindBufferMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkBuffer buffer, const void *pNext)
Binds buffer to allocation with additional parameters.
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:3005
@ VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:3040
@ VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:3059
@ VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:3051
@ VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:3023
@ VMA_POOL_CREATE_ALGORITHM_MASK
Definition: vk_mem_alloc.h:3055
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkResult vmaDefragment(VmaAllocator allocator, const VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
VkResult vmaCreateBufferWithAlignment(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkDeviceSize minAlignment, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Creates a buffer with additional minimum alignment.
struct VmaBudget VmaBudget
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
VmaMemoryUsage
Definition: vk_mem_alloc.h:2718
@ VMA_MEMORY_USAGE_MAX_ENUM
Definition: vk_mem_alloc.h:2781
@ VMA_MEMORY_USAGE_CPU_ONLY
Definition: vk_mem_alloc.h:2749
@ VMA_MEMORY_USAGE_CPU_COPY
Definition: vk_mem_alloc.h:2771
@ VMA_MEMORY_USAGE_GPU_TO_CPU
Definition: vk_mem_alloc.h:2765
@ VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED
Definition: vk_mem_alloc.h:2779
@ VMA_MEMORY_USAGE_CPU_TO_GPU
Definition: vk_mem_alloc.h:2756
@ VMA_MEMORY_USAGE_GPU_ONLY
Definition: vk_mem_alloc.h:2739
@ VMA_MEMORY_USAGE_UNKNOWN
Definition: vk_mem_alloc.h:2722
VkResult vmaBindImageMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkImage image, const void *pNext)
Binds image to allocation with additional parameters.
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VkResult vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
VkResult vmaInvalidateAllocations(VmaAllocator allocator, uint32_t allocationCount, const VmaAllocation *allocations, const VkDeviceSize *offsets, const VkDeviceSize *sizes)
Invalidates memory of given set of allocations.
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
VkResult vmaBeginDefragmentationPass(VmaAllocator allocator, VmaDefragmentationContext context, VmaDefragmentationPassInfo *pInfo)
VkResult vmaFlushAllocations(VmaAllocator allocator, uint32_t allocationCount, const VmaAllocation *allocations, const VkDeviceSize *offsets, const VkDeviceSize *sizes)
Flushes memory of given set of allocations.
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:2352
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
struct VmaDefragmentationPassMoveInfo VmaDefragmentationPassMoveInfo
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2785
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT
Definition: vk_mem_alloc.h:2880
@ VMA_ALLOCATION_CREATE_MAPPED_BIT
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2816
@ VMA_ALLOCATION_CREATE_DONT_BIND_BIT
Definition: vk_mem_alloc.h:2853
@ VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT
Definition: vk_mem_alloc.h:2873
@ VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2792
@ VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT
Definition: vk_mem_alloc.h:2847
@ VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT
Definition: vk_mem_alloc.h:2829
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT
Definition: vk_mem_alloc.h:2883
@ VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT
Definition: vk_mem_alloc.h:2836
@ VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT
Definition: vk_mem_alloc.h:2862
@ VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2803
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT
Definition: vk_mem_alloc.h:2877
@ VMA_ALLOCATION_CREATE_STRATEGY_MASK
Definition: vk_mem_alloc.h:2887
@ VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT
Definition: vk_mem_alloc.h:2842
@ VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT
Definition: vk_mem_alloc.h:2857
@ VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT
Definition: vk_mem_alloc.h:2866
@ VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2892
void vmaSetPoolName(VmaAllocator allocator, VmaPool pool, const char *pName)
Sets name of a custom pool.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
void vmaGetBudget(VmaAllocator allocator, VmaBudget *pBudget)
Retrieves information about current memory budget for all memory heaps.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
void vmaGetPoolName(VmaAllocator allocator, VmaPool pool, const char **ppName)
Retrieves name of a custom pool.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:2400
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
void vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo *pAllocatorInfo)
Returns information about existing VmaAllocator object - handle to Vulkan device etc.