23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
2028 #ifndef VMA_RECORDING_ENABLED
2029 #define VMA_RECORDING_ENABLED 0
2032 #if !defined(NOMINMAX) && defined(VMA_IMPLEMENTATION)
2036 #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
2037 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
2038 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
2039 extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
2040 extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
2041 extern PFN_vkAllocateMemory vkAllocateMemory;
2042 extern PFN_vkFreeMemory vkFreeMemory;
2043 extern PFN_vkMapMemory vkMapMemory;
2044 extern PFN_vkUnmapMemory vkUnmapMemory;
2045 extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
2046 extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
2047 extern PFN_vkBindBufferMemory vkBindBufferMemory;
2048 extern PFN_vkBindImageMemory vkBindImageMemory;
2049 extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
2050 extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
2051 extern PFN_vkCreateBuffer vkCreateBuffer;
2052 extern PFN_vkDestroyBuffer vkDestroyBuffer;
2053 extern PFN_vkCreateImage vkCreateImage;
2054 extern PFN_vkDestroyImage vkDestroyImage;
2055 extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
2056 #if VMA_VULKAN_VERSION >= 1001000
2057 extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
2058 extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
2059 extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
2060 extern PFN_vkBindImageMemory2 vkBindImageMemory2;
2061 extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
2066 #include <vulkan/vulkan.h>
2072 #if !defined(VMA_VULKAN_VERSION)
2073 #if defined(VK_VERSION_1_2)
2074 #define VMA_VULKAN_VERSION 1002000
2075 #elif defined(VK_VERSION_1_1)
2076 #define VMA_VULKAN_VERSION 1001000
2078 #define VMA_VULKAN_VERSION 1000000
2082 #if !defined(VMA_DEDICATED_ALLOCATION)
2083 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
2084 #define VMA_DEDICATED_ALLOCATION 1
2086 #define VMA_DEDICATED_ALLOCATION 0
2090 #if !defined(VMA_BIND_MEMORY2)
2091 #if VK_KHR_bind_memory2
2092 #define VMA_BIND_MEMORY2 1
2094 #define VMA_BIND_MEMORY2 0
2098 #if !defined(VMA_MEMORY_BUDGET)
2099 #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
2100 #define VMA_MEMORY_BUDGET 1
2102 #define VMA_MEMORY_BUDGET 0
2107 #if !defined(VMA_BUFFER_DEVICE_ADDRESS)
2108 #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
2109 #define VMA_BUFFER_DEVICE_ADDRESS 1
2111 #define VMA_BUFFER_DEVICE_ADDRESS 0
2116 #if !defined(VMA_MEMORY_PRIORITY)
2117 #if VK_EXT_memory_priority
2118 #define VMA_MEMORY_PRIORITY 1
2120 #define VMA_MEMORY_PRIORITY 0
2129 #ifndef VMA_CALL_PRE
2130 #define VMA_CALL_PRE
2132 #ifndef VMA_CALL_POST
2133 #define VMA_CALL_POST
2147 #ifndef VMA_LEN_IF_NOT_NULL
2148 #define VMA_LEN_IF_NOT_NULL(len)
2153 #ifndef VMA_NULLABLE
2155 #define VMA_NULLABLE _Nullable
2157 #define VMA_NULLABLE
2163 #ifndef VMA_NOT_NULL
2165 #define VMA_NOT_NULL _Nonnull
2167 #define VMA_NOT_NULL
2173 #ifndef VMA_NOT_NULL_NON_DISPATCHABLE
2174 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2175 #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
2177 #define VMA_NOT_NULL_NON_DISPATCHABLE
2181 #ifndef VMA_NULLABLE_NON_DISPATCHABLE
2182 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2183 #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
2185 #define VMA_NULLABLE_NON_DISPATCHABLE
2203 uint32_t memoryType,
2204 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2206 void* VMA_NULLABLE pUserData);
2210 uint32_t memoryType,
2211 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2213 void* VMA_NULLABLE pUserData);
2370 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
2371 PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
2372 PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
2374 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
2375 PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
2376 PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
2378 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
2379 PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
2469 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(
"VkPhysicalDeviceMemoryProperties::memoryHeapCount")
pHeapSizeLimit;
2542 const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);
2550 const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);
2560 uint32_t memoryTypeIndex,
2561 VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);
2573 uint32_t frameIndex);
2669 #ifndef VMA_STATS_STRING_ENABLED
2670 #define VMA_STATS_STRING_ENABLED 1
2673 #if VMA_STATS_STRING_ENABLED
2680 char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
2681 VkBool32 detailedMap);
2685 char* VMA_NULLABLE pStatsString);
2946 uint32_t memoryTypeBits,
2948 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2964 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
2966 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2982 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
2984 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
3151 VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);
3179 size_t* VMA_NULLABLE pLostAllocationCount);
3206 const char* VMA_NULLABLE * VMA_NOT_NULL ppName);
3216 const char* VMA_NULLABLE pName);
3310 const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
3336 const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
3338 size_t allocationCount,
3339 VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3340 VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);
3350 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3358 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3383 size_t allocationCount,
3384 const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);
3441 void* VMA_NULLABLE pUserData);
3498 void* VMA_NULLABLE * VMA_NOT_NULL ppData);
3536 VkDeviceSize offset,
3563 VkDeviceSize offset,
3582 uint32_t allocationCount,
3583 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3584 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3585 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3603 uint32_t allocationCount,
3604 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3605 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3606 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3685 const VmaPool VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(poolCount)
pPools;
3719 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE
memory;
3857 const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3858 size_t allocationCount,
3859 VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
3878 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);
3893 VkDeviceSize allocationLocalOffset,
3894 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3895 const void* VMA_NULLABLE pNext);
3912 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);
3927 VkDeviceSize allocationLocalOffset,
3928 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3929 const void* VMA_NULLABLE pNext);
3963 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
3965 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
3982 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
3988 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
3990 VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
4007 VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
4017 #if defined(__cplusplus) && defined(__INTELLISENSE__)
4018 #define VMA_IMPLEMENTATION
4021 #ifdef VMA_IMPLEMENTATION
4022 #undef VMA_IMPLEMENTATION
4029 #if VMA_RECORDING_ENABLED
4032 #include <windows.h>
4052 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
4053 #define VMA_STATIC_VULKAN_FUNCTIONS 1
4062 #if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
4063 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
4064 #if defined(VK_NO_PROTOTYPES)
4065 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
4066 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
4079 #if VMA_USE_STL_CONTAINERS
4080 #define VMA_USE_STL_VECTOR 1
4081 #define VMA_USE_STL_UNORDERED_MAP 1
4082 #define VMA_USE_STL_LIST 1
4085 #ifndef VMA_USE_STL_SHARED_MUTEX
4087 #if __cplusplus >= 201703L
4088 #define VMA_USE_STL_SHARED_MUTEX 1
4092 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
4093 #define VMA_USE_STL_SHARED_MUTEX 1
4095 #define VMA_USE_STL_SHARED_MUTEX 0
4103 #if VMA_USE_STL_VECTOR
4107 #if VMA_USE_STL_UNORDERED_MAP
4108 #include <unordered_map>
4111 #if VMA_USE_STL_LIST
4120 #include <algorithm>
4125 #define VMA_NULL nullptr
4128 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
4130 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4133 if(alignment <
sizeof(
void*))
4135 alignment =
sizeof(
void*);
4138 return memalign(alignment, size);
4140 #elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
4143 #if defined(__APPLE__)
4144 #include <AvailabilityMacros.h>
4147 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4149 #if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0))
4150 #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0
4157 if (__builtin_available(macOS 10.15, iOS 13, *))
4158 return aligned_alloc(alignment, size);
4162 if(alignment <
sizeof(
void*))
4164 alignment =
sizeof(
void*);
4168 if(posix_memalign(&pointer, alignment, size) == 0)
4172 #elif defined(_WIN32)
4173 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4175 return _aligned_malloc(size, alignment);
4178 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4180 return aligned_alloc(alignment, size);
4185 static void vma_aligned_free(
void* ptr)
4190 static void vma_aligned_free(
void* VMA_NULLABLE ptr)
4204 #define VMA_ASSERT(expr)
4206 #define VMA_ASSERT(expr) assert(expr)
4212 #ifndef VMA_HEAVY_ASSERT
4214 #define VMA_HEAVY_ASSERT(expr)
4216 #define VMA_HEAVY_ASSERT(expr)
4220 #ifndef VMA_ALIGN_OF
4221 #define VMA_ALIGN_OF(type) (__alignof(type))
4224 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
4225 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
4228 #ifndef VMA_SYSTEM_ALIGNED_FREE
4230 #if defined(VMA_SYSTEM_FREE)
4231 #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr)
4233 #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr)
4238 #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
4242 #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
4246 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
4250 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
4253 #ifndef VMA_DEBUG_LOG
4254 #define VMA_DEBUG_LOG(format, ...)
4264 #if VMA_STATS_STRING_ENABLED
4265 static inline void VmaUint32ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint32_t num)
4267 snprintf(outStr, strLen,
"%u",
static_cast<unsigned int>(num));
4269 static inline void VmaUint64ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint64_t num)
4271 snprintf(outStr, strLen,
"%llu",
static_cast<unsigned long long>(num));
4273 static inline void VmaPtrToStr(
char* VMA_NOT_NULL outStr,
size_t strLen,
const void* ptr)
4275 snprintf(outStr, strLen,
"%p", ptr);
4283 void Lock() { m_Mutex.lock(); }
4284 void Unlock() { m_Mutex.unlock(); }
4285 bool TryLock() {
return m_Mutex.try_lock(); }
4289 #define VMA_MUTEX VmaMutex
4293 #ifndef VMA_RW_MUTEX
4294 #if VMA_USE_STL_SHARED_MUTEX
4296 #include <shared_mutex>
4300 void LockRead() { m_Mutex.lock_shared(); }
4301 void UnlockRead() { m_Mutex.unlock_shared(); }
4302 bool TryLockRead() {
return m_Mutex.try_lock_shared(); }
4303 void LockWrite() { m_Mutex.lock(); }
4304 void UnlockWrite() { m_Mutex.unlock(); }
4305 bool TryLockWrite() {
return m_Mutex.try_lock(); }
4307 std::shared_mutex m_Mutex;
4309 #define VMA_RW_MUTEX VmaRWMutex
4310 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
4316 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
4317 void LockRead() { AcquireSRWLockShared(&m_Lock); }
4318 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
4319 bool TryLockRead() {
return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
4320 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
4321 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
4322 bool TryLockWrite() {
return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
4326 #define VMA_RW_MUTEX VmaRWMutex
4332 void LockRead() { m_Mutex.Lock(); }
4333 void UnlockRead() { m_Mutex.Unlock(); }
4334 bool TryLockRead() {
return m_Mutex.TryLock(); }
4335 void LockWrite() { m_Mutex.Lock(); }
4336 void UnlockWrite() { m_Mutex.Unlock(); }
4337 bool TryLockWrite() {
return m_Mutex.TryLock(); }
4341 #define VMA_RW_MUTEX VmaRWMutex
4348 #ifndef VMA_ATOMIC_UINT32
4350 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
4353 #ifndef VMA_ATOMIC_UINT64
4355 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
4358 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
4363 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
4366 #ifndef VMA_MIN_ALIGNMENT
4371 #ifdef VMA_DEBUG_ALIGNMENT
4372 #define VMA_MIN_ALIGNMENT VMA_DEBUG_ALIGNMENT
4374 #define VMA_MIN_ALIGNMENT (1)
4378 #ifndef VMA_DEBUG_MARGIN
4383 #define VMA_DEBUG_MARGIN (0)
4386 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
4391 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
4394 #ifndef VMA_DEBUG_DETECT_CORRUPTION
4400 #define VMA_DEBUG_DETECT_CORRUPTION (0)
4403 #ifndef VMA_DEBUG_GLOBAL_MUTEX
4408 #define VMA_DEBUG_GLOBAL_MUTEX (0)
4411 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
4416 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
4419 #ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
4424 #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0)
4427 #ifndef VMA_SMALL_HEAP_MAX_SIZE
4429 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
4432 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
4434 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
4437 #ifndef VMA_CLASS_NO_COPY
4438 #define VMA_CLASS_NO_COPY(className) \
4440 className(const className&) = delete; \
4441 className& operator=(const className&) = delete;
4444 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
4447 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
4449 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
4450 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
4458 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
4459 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
4460 static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
4462 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
4464 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
4465 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in the 32-bit value `v`
// (population count), using the classic parallel bit-summing trick:
// each step folds adjacent groups of bits into wider partial sums.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);        // sums of adjacent bit pairs
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);  // sums per nibble
    c = ((c >> 4) + c) & 0x0F0F0F0F;                 // sums per byte
    c = ((c >> 8) + c) & 0x00FF00FF;                 // sums per 16-bit half
    c = ((c >> 16) + c) & 0x0000FFFF;                // final 32-bit sum
    return c;
}
// Returns true if `x` is a power of two.
// NOTE(review): also returns true for x == 0 (the bit trick does not
// distinguish zero); callers must not rely on it rejecting zero.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x - 1)) == 0;
}
// Aligns `val` up to the nearest multiple of `alignment`.
// `alignment` must be a power of two (checked with VMA_HEAVY_ASSERT);
// the mask trick below is only valid under that precondition.
template <typename T>
static inline T VmaAlignUp(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    return (val + alignment - 1) & ~(alignment - 1);
}
// Aligns `val` down to the nearest multiple of `alignment`.
// `alignment` must be a power of two (checked with VMA_HEAVY_ASSERT);
// the mask trick below is only valid under that precondition.
template <typename T>
static inline T VmaAlignDown(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    return val & ~(alignment - 1);
}
// Integer division of `x` by `y` rounded to nearest: computes
// (x + y/2) / y. For positive operands, halves round upward.
// NOTE(review): intended for unsigned/positive values — behavior for
// negative signed inputs is not round-to-nearest; confirm with callers.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
4514 static inline uint32_t VmaNextPow2(uint32_t v)
4525 static inline uint64_t VmaNextPow2(uint64_t v)
4539 static inline uint32_t VmaPrevPow2(uint32_t v)
4549 static inline uint64_t VmaPrevPow2(uint64_t v)
4561 static inline bool VmaStrIsEmpty(
const char* pStr)
4563 return pStr == VMA_NULL || *pStr ==
'\0';
4566 #if VMA_STATS_STRING_ENABLED
4568 static const char* VmaAlgorithmToStr(uint32_t algorithm)
4588 template<
typename Iterator,
typename Compare>
4589 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
4591 Iterator centerValue = end; --centerValue;
4592 Iterator insertIndex = beg;
4593 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
4595 if(cmp(*memTypeIndex, *centerValue))
4597 if(insertIndex != memTypeIndex)
4599 VMA_SWAP(*memTypeIndex, *insertIndex);
4604 if(insertIndex != centerValue)
4606 VMA_SWAP(*insertIndex, *centerValue);
4611 template<
typename Iterator,
typename Compare>
4612 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
4616 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
4617 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
4618 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
4622 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
4633 static inline bool VmaBlocksOnSamePage(
4634 VkDeviceSize resourceAOffset,
4635 VkDeviceSize resourceASize,
4636 VkDeviceSize resourceBOffset,
4637 VkDeviceSize pageSize)
4639 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
4640 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
4641 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
4642 VkDeviceSize resourceBStart = resourceBOffset;
4643 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
4644 return resourceAEndPage == resourceBStartPage;
4647 enum VmaSuballocationType
4649 VMA_SUBALLOCATION_TYPE_FREE = 0,
4650 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
4651 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
4652 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
4653 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
4654 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
4655 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
4664 static inline bool VmaIsBufferImageGranularityConflict(
4665 VmaSuballocationType suballocType1,
4666 VmaSuballocationType suballocType2)
4668 if(suballocType1 > suballocType2)
4670 VMA_SWAP(suballocType1, suballocType2);
4673 switch(suballocType1)
4675 case VMA_SUBALLOCATION_TYPE_FREE:
4677 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
4679 case VMA_SUBALLOCATION_TYPE_BUFFER:
4681 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4682 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4683 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
4685 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4686 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
4687 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4688 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
4690 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4691 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
4699 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
4701 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4702 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
4703 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4704 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
4706 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
4713 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
4715 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4716 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
4717 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4718 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
4720 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
4733 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
4735 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
4736 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4737 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4738 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
4744 VMA_CLASS_NO_COPY(VmaMutexLock)
4746 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
4747 m_pMutex(useMutex ? &mutex : VMA_NULL)
4748 {
if(m_pMutex) { m_pMutex->Lock(); } }
4750 {
if(m_pMutex) { m_pMutex->Unlock(); } }
4752 VMA_MUTEX* m_pMutex;
4756 struct VmaMutexLockRead
4758 VMA_CLASS_NO_COPY(VmaMutexLockRead)
4760 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
4761 m_pMutex(useMutex ? &mutex : VMA_NULL)
4762 {
if(m_pMutex) { m_pMutex->LockRead(); } }
4763 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
4765 VMA_RW_MUTEX* m_pMutex;
4769 struct VmaMutexLockWrite
4771 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
4773 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
4774 m_pMutex(useMutex ? &mutex : VMA_NULL)
4775 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
4776 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
4778 VMA_RW_MUTEX* m_pMutex;
4781 #if VMA_DEBUG_GLOBAL_MUTEX
4782 static VMA_MUTEX gDebugGlobalMutex;
4783 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
4785 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
4789 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
4800 template <
typename CmpLess,
typename IterT,
typename KeyT>
4801 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key,
const CmpLess& cmp)
4803 size_t down = 0, up = (end - beg);
4806 const size_t mid = down + (up - down) / 2;
4807 if(cmp(*(beg+mid), key))
4819 template<
typename CmpLess,
typename IterT,
typename KeyT>
4820 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
4822 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4823 beg, end, value, cmp);
4825 (!cmp(*it, value) && !cmp(value, *it)))
4837 template<
typename T>
4838 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
4840 for(uint32_t i = 0; i < count; ++i)
4842 const T iPtr = arr[i];
4843 if(iPtr == VMA_NULL)
4847 for(uint32_t j = i + 1; j < count; ++j)
// Inserts `newStruct` at the front of the Vulkan-style pNext extension
// chain rooted at `mainStruct`. Both types must expose a writable
// `pNext` member; the previous head of the chain becomes newStruct's
// successor.
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
4868 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
4870 void* result = VMA_NULL;
4871 if((pAllocationCallbacks != VMA_NULL) &&
4872 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
4874 result = (*pAllocationCallbacks->pfnAllocation)(
4875 pAllocationCallbacks->pUserData,
4878 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
4882 result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
4884 VMA_ASSERT(result != VMA_NULL &&
"CPU memory allocation failed.");
4888 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
4890 if((pAllocationCallbacks != VMA_NULL) &&
4891 (pAllocationCallbacks->pfnFree != VMA_NULL))
4893 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
4897 VMA_SYSTEM_ALIGNED_FREE(ptr);
4901 template<
typename T>
4902 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
4904 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
4907 template<
typename T>
4908 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
4910 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
4913 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
4915 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
4917 template<
typename T>
4918 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
4921 VmaFree(pAllocationCallbacks, ptr);
4924 template<
typename T>
4925 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
4929 for(
size_t i = count; i--; )
4933 VmaFree(pAllocationCallbacks, ptr);
4937 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr)
4939 if(srcStr != VMA_NULL)
4941 const size_t len = strlen(srcStr);
4942 char*
const result = vma_new_array(allocs,
char, len + 1);
4943 memcpy(result, srcStr, len + 1);
4952 static void VmaFreeString(
const VkAllocationCallbacks* allocs,
char* str)
4956 const size_t len = strlen(str);
4957 vma_delete_array(allocs, str, len + 1);
4962 template<
typename T>
4963 class VmaStlAllocator
4966 const VkAllocationCallbacks*
const m_pCallbacks;
4967 typedef T value_type;
4969 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
4970 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
4972 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
4973 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
4975 template<
typename U>
4976 bool operator==(
const VmaStlAllocator<U>& rhs)
const
4978 return m_pCallbacks == rhs.m_pCallbacks;
4980 template<
typename U>
4981 bool operator!=(
const VmaStlAllocator<U>& rhs)
const
4983 return m_pCallbacks != rhs.m_pCallbacks;
4986 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
4987 VmaStlAllocator(
const VmaStlAllocator&) =
default;
4990 #if VMA_USE_STL_VECTOR
4992 #define VmaVector std::vector
4994 template<
typename T,
typename allocatorT>
4995 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
4997 vec.insert(vec.begin() + index, item);
5000 template<
typename T,
typename allocatorT>
5001 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
5003 vec.erase(vec.begin() + index);
5011 template<
typename T,
typename AllocatorT>
5015 typedef T value_type;
5017 VmaVector(
const AllocatorT& allocator) :
5018 m_Allocator(allocator),
5025 VmaVector(
size_t count,
const AllocatorT& allocator) :
5026 m_Allocator(allocator),
5027 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
5035 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator)
5036 : VmaVector(count, allocator) {}
5038 VmaVector(
const VmaVector<T, AllocatorT>& src) :
5039 m_Allocator(src.m_Allocator),
5040 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
5041 m_Count(src.m_Count),
5042 m_Capacity(src.m_Count)
5046 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
5052 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5055 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
5059 resize(rhs.m_Count);
5062 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
5068 bool empty()
const {
return m_Count == 0; }
5069 size_t size()
const {
return m_Count; }
5070 T* data() {
return m_pArray; }
5071 const T* data()
const {
return m_pArray; }
5073 T& operator[](
size_t index)
5075 VMA_HEAVY_ASSERT(index < m_Count);
5076 return m_pArray[index];
5078 const T& operator[](
size_t index)
const
5080 VMA_HEAVY_ASSERT(index < m_Count);
5081 return m_pArray[index];
5086 VMA_HEAVY_ASSERT(m_Count > 0);
5089 const T& front()
const
5091 VMA_HEAVY_ASSERT(m_Count > 0);
5096 VMA_HEAVY_ASSERT(m_Count > 0);
5097 return m_pArray[m_Count - 1];
5099 const T& back()
const
5101 VMA_HEAVY_ASSERT(m_Count > 0);
5102 return m_pArray[m_Count - 1];
5105 void reserve(
size_t newCapacity,
bool freeMemory =
false)
5107 newCapacity = VMA_MAX(newCapacity, m_Count);
5109 if((newCapacity < m_Capacity) && !freeMemory)
5111 newCapacity = m_Capacity;
5114 if(newCapacity != m_Capacity)
5116 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
5119 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5121 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5122 m_Capacity = newCapacity;
5123 m_pArray = newArray;
5127 void resize(
size_t newCount)
5129 size_t newCapacity = m_Capacity;
5130 if(newCount > m_Capacity)
5132 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
5135 if(newCapacity != m_Capacity)
5137 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
5138 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
5139 if(elementsToCopy != 0)
5141 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
5143 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5144 m_Capacity = newCapacity;
5145 m_pArray = newArray;
5156 void shrink_to_fit()
5158 if(m_Capacity > m_Count)
5160 T* newArray = VMA_NULL;
5163 newArray = VmaAllocateArray<T>(m_Allocator.m_pCallbacks, m_Count);
5164 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5166 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5167 m_Capacity = m_Count;
5168 m_pArray = newArray;
5172 void insert(
size_t index,
const T& src)
5174 VMA_HEAVY_ASSERT(index <= m_Count);
5175 const size_t oldCount = size();
5176 resize(oldCount + 1);
5177 if(index < oldCount)
5179 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
5181 m_pArray[index] = src;
5184 void remove(
size_t index)
5186 VMA_HEAVY_ASSERT(index < m_Count);
5187 const size_t oldCount = size();
5188 if(index < oldCount - 1)
5190 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
5192 resize(oldCount - 1);
5195 void push_back(
const T& src)
5197 const size_t newIndex = size();
5198 resize(newIndex + 1);
5199 m_pArray[newIndex] = src;
5204 VMA_HEAVY_ASSERT(m_Count > 0);
5208 void push_front(
const T& src)
5215 VMA_HEAVY_ASSERT(m_Count > 0);
5219 typedef T* iterator;
5221 iterator begin() {
return m_pArray; }
5222 iterator end() {
return m_pArray + m_Count; }
5225 AllocatorT m_Allocator;
5231 template<
typename T,
typename allocatorT>
5232 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
5234 vec.insert(index, item);
5237 template<
typename T,
typename allocatorT>
5238 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
5245 template<
typename CmpLess,
typename VectorT>
5246 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
5248 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
5250 vector.data() + vector.size(),
5252 CmpLess()) - vector.data();
5253 VmaVectorInsert(vector, indexToInsert, value);
5254 return indexToInsert;
5257 template<
typename CmpLess,
typename VectorT>
5258 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
5261 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
5266 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
5268 size_t indexToRemove = it - vector.begin();
5269 VmaVectorRemove(vector, indexToRemove);
// NOTE(review): garbled extraction of VmaSmallVector — fused line numbers and
// missing lines (class braces, m_Count/m_StaticArray members, several method
// bodies). Do not edit in place; restore from intact upstream source.
// Visible intent: small-buffer-optimized vector storing up to N elements
// inline (m_StaticArray) and spilling to heap (m_DynamicArray) when the
// element count exceeds N.
5286 template<
typename T,
typename AllocatorT,
size_t N>
5287 class VmaSmallVector
5290 typedef T value_type;
5292 VmaSmallVector(
const AllocatorT& allocator) :
5294 m_DynamicArray(allocator)
// If the initial count exceeds N, size the dynamic array; otherwise the
// inline buffer is used and the dynamic array stays empty.
5297 VmaSmallVector(
size_t count,
const AllocatorT& allocator) :
5299 m_DynamicArray(count > N ? count : 0, allocator)
// Cross-instantiation copy/assign are explicitly deleted.
5302 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5303 VmaSmallVector(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) =
delete;
5304 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5305 VmaSmallVector<T, AllocatorT, N>& operator=(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) =
delete;
5307 bool empty()
const {
return m_Count == 0; }
5308 size_t size()
const {
return m_Count; }
// data() selects the active storage: heap when m_Count > N, inline otherwise.
5309 T* data() {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5310 const T* data()
const {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5312 T& operator[](
size_t index)
5314 VMA_HEAVY_ASSERT(index < m_Count);
5315 return data()[index];
5317 const T& operator[](
size_t index)
const
5319 VMA_HEAVY_ASSERT(index < m_Count);
5320 return data()[index];
// front()/back() fragments follow; some bodies were lost in extraction.
5325 VMA_HEAVY_ASSERT(m_Count > 0);
5328 const T& front()
const
5330 VMA_HEAVY_ASSERT(m_Count > 0);
5335 VMA_HEAVY_ASSERT(m_Count > 0);
5336 return data()[m_Count - 1];
5338 const T& back()
const
5340 VMA_HEAVY_ASSERT(m_Count > 0);
5341 return data()[m_Count - 1];
// resize() handles the four inline/heap transitions; data is copied with
// memcpy when moving between the inline buffer and the heap array.
5344 void resize(
size_t newCount,
bool freeMemory =
false)
5346 if(newCount > N && m_Count > N)
5349 m_DynamicArray.resize(newCount);
5352 m_DynamicArray.shrink_to_fit();
5355 else if(newCount > N && m_Count <= N)
5358 m_DynamicArray.resize(newCount);
5361 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count *
sizeof(T));
5364 else if(newCount <= N && m_Count > N)
5369 memcpy(m_StaticArray, m_DynamicArray.data(), newCount *
sizeof(T));
5371 m_DynamicArray.resize(0);
5374 m_DynamicArray.shrink_to_fit();
5384 void clear(
bool freeMemory =
false)
5386 m_DynamicArray.clear();
5389 m_DynamicArray.shrink_to_fit();
5394 void insert(
size_t index,
const T& src)
5396 VMA_HEAVY_ASSERT(index <= m_Count);
5397 const size_t oldCount = size();
5398 resize(oldCount + 1);
5399 T*
const dataPtr = data();
5400 if(index < oldCount)
5403 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) *
sizeof(T));
5405 dataPtr[index] = src;
5408 void remove(
size_t index)
5410 VMA_HEAVY_ASSERT(index < m_Count);
5411 const size_t oldCount = size();
5412 if(index < oldCount - 1)
5415 T*
const dataPtr = data();
5416 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) *
sizeof(T));
5418 resize(oldCount - 1);
5421 void push_back(
const T& src)
5423 const size_t newIndex = size();
5424 resize(newIndex + 1);
5425 data()[newIndex] = src;
5430 VMA_HEAVY_ASSERT(m_Count > 0);
5434 void push_front(
const T& src)
5441 VMA_HEAVY_ASSERT(m_Count > 0);
5445 typedef T* iterator;
5447 iterator begin() {
return data(); }
5448 iterator end() {
return data() + m_Count; }
5453 VmaVector<T, AllocatorT> m_DynamicArray;
// NOTE(review): garbled extraction of the VmaPoolAllocator declaration —
// fused line numbers; the Item/ItemBlock struct wrappers, Free() declaration
// and access specifiers are missing. Restore from intact upstream source.
// Visible intent: fixed-size object pool allocating items in growing blocks;
// free items form an intrusive singly linked list via NextFreeIndex.
5464 template<
typename T>
5465 class VmaPoolAllocator
5467 VMA_CLASS_NO_COPY(VmaPoolAllocator)
5469 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
5470 ~VmaPoolAllocator();
5471 template<
typename... Types> T* Alloc(Types... args);
// Item: index of the next free slot + raw storage aligned for T.
5477 uint32_t NextFreeIndex;
5478 alignas(T)
char Value[
sizeof(T)];
5485 uint32_t FirstFreeIndex;
5488 const VkAllocationCallbacks* m_pAllocationCallbacks;
5489 const uint32_t m_FirstBlockCapacity;
5490 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
5492 ItemBlock& CreateNewBlock();
5495 template<
typename T>
5496 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
5497 m_pAllocationCallbacks(pAllocationCallbacks),
5498 m_FirstBlockCapacity(firstBlockCapacity),
5499 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
5501 VMA_ASSERT(m_FirstBlockCapacity > 1);
5504 template<
typename T>
5505 VmaPoolAllocator<T>::~VmaPoolAllocator()
5507 for(
size_t i = m_ItemBlocks.size(); i--; )
5508 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
5509 m_ItemBlocks.clear();
// NOTE(review): garbled extraction of VmaPoolAllocator method definitions —
// fused line numbers; `return result;` lines, the pItemPtr declaration in
// Free(), the remaining ItemBlock initializer fields and several braces are
// missing. Restore from intact upstream source.
5512 template<
typename T>
5513 template<
typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
// Scan existing blocks (newest first) for a free slot.
5515 for(
size_t i = m_ItemBlocks.size(); i--; )
5517 ItemBlock& block = m_ItemBlocks[i];
5519 if(block.FirstFreeIndex != UINT32_MAX)
5521 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
5522 block.FirstFreeIndex = pItem->NextFreeIndex;
5523 T* result = (T*)&pItem->Value;
// Construct T in the raw slot via placement new with perfect forwarding.
5524 new(result)T(std::forward<Types>(args)...);
// No free slot anywhere: grow by creating a new block and use its first item.
5530 ItemBlock& newBlock = CreateNewBlock();
5531 Item*
const pItem = &newBlock.pItems[0];
5532 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
5533 T* result = (T*)&pItem->Value;
5534 new(result)T(std::forward<Types>(args)...);
5538 template<
typename T>
5539 void VmaPoolAllocator<T>::Free(T* ptr)
5542 for(
size_t i = m_ItemBlocks.size(); i--; )
5544 ItemBlock& block = m_ItemBlocks[i];
// memcpy instead of a pointer cast — avoids strict-aliasing issues when
// reinterpreting the T* as an Item*. (The Item* declaration line is lost.)
5548 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
5551 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
5554 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
// Push the slot onto the block's free list.
5555 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
5556 block.FirstFreeIndex = index;
5560 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
5563 template<
typename T>
5564 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
// Blocks grow geometrically: each new block is 1.5x the previous capacity.
5566 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
5567 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
5569 const ItemBlock newBlock = {
5570 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
5574 m_ItemBlocks.push_back(newBlock);
// Thread all new items onto the free list; the last one terminates it.
5577 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
5578 newBlock.pItems[i].NextFreeIndex = i + 1;
5579 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
5580 return m_ItemBlocks.back();
// NOTE(review): garbled extraction — fused line numbers; the VmaListItem
// struct body, the #else/#endif of the VMA_USE_STL_LIST branch, the class
// header line, access specifiers and several declarations are missing.
// Restore from intact upstream source.
// Visible intent: doubly linked list built on VmaPoolAllocator for node
// storage; VmaList (below) wraps it with STL-style iterators.
5586 #if VMA_USE_STL_LIST
5588 #define VmaList std::list
5592 template<
typename T>
5601 template<
typename T>
5604 VMA_CLASS_NO_COPY(VmaRawList)
5606 typedef VmaListItem<T> ItemType;
5608 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
5612 size_t GetCount()
const {
return m_Count; }
5613 bool IsEmpty()
const {
return m_Count == 0; }
5615 ItemType* Front() {
return m_pFront; }
5616 const ItemType* Front()
const {
return m_pFront; }
5617 ItemType* Back() {
return m_pBack; }
5618 const ItemType* Back()
const {
return m_pBack; }
5620 ItemType* PushBack();
5621 ItemType* PushFront();
5622 ItemType* PushBack(
const T& value);
5623 ItemType* PushFront(
const T& value);
5628 ItemType* InsertBefore(ItemType* pItem);
5630 ItemType* InsertAfter(ItemType* pItem);
5632 ItemType* InsertBefore(ItemType* pItem,
const T& value);
5633 ItemType* InsertAfter(ItemType* pItem,
const T& value);
5635 void Remove(ItemType* pItem);
5638 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
5639 VmaPoolAllocator<ItemType> m_ItemAllocator;
// NOTE(review): garbled extraction of VmaRawList member definitions — fused
// line numbers; member initializers, else-branches, count updates, `return`
// statements and braces are missing throughout. Restore from intact upstream
// source before editing.
5645 template<
typename T>
5646 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
5647 m_pAllocationCallbacks(pAllocationCallbacks),
// Node storage comes from a pool allocator with a first-block capacity of 128.
5648 m_ItemAllocator(pAllocationCallbacks, 128),
5655 template<
typename T>
5656 VmaRawList<T>::~VmaRawList()
// Clear(): walk from the back, freeing each node into the pool allocator.
5662 template<
typename T>
5663 void VmaRawList<T>::Clear()
5665 if(IsEmpty() ==
false)
5667 ItemType* pItem = m_pBack;
5668 while(pItem != VMA_NULL)
5670 ItemType*
const pPrevItem = pItem->pPrev;
5671 m_ItemAllocator.Free(pItem);
5674 m_pFront = VMA_NULL;
// PushBack(): allocate a node, link as new tail (or as sole node when empty).
5680 template<
typename T>
5681 VmaListItem<T>* VmaRawList<T>::PushBack()
5683 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5684 pNewItem->pNext = VMA_NULL;
5687 pNewItem->pPrev = VMA_NULL;
5688 m_pFront = pNewItem;
5694 pNewItem->pPrev = m_pBack;
5695 m_pBack->pNext = pNewItem;
// PushFront(): mirror image of PushBack().
5702 template<
typename T>
5703 VmaListItem<T>* VmaRawList<T>::PushFront()
5705 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5706 pNewItem->pPrev = VMA_NULL;
5709 pNewItem->pNext = VMA_NULL;
5710 m_pFront = pNewItem;
5716 pNewItem->pNext = m_pFront;
5717 m_pFront->pPrev = pNewItem;
5718 m_pFront = pNewItem;
// Value-taking overloads: push a node, then copy-assign the payload.
5724 template<
typename T>
5725 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
5727 ItemType*
const pNewItem = PushBack();
5728 pNewItem->Value = value;
5732 template<
typename T>
5733 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
5735 ItemType*
const pNewItem = PushFront();
5736 pNewItem->Value = value;
// PopBack()/PopFront(): unlink the end node and return it to the pool.
5740 template<
typename T>
5741 void VmaRawList<T>::PopBack()
5743 VMA_HEAVY_ASSERT(m_Count > 0);
5744 ItemType*
const pBackItem = m_pBack;
5745 ItemType*
const pPrevItem = pBackItem->pPrev;
5746 if(pPrevItem != VMA_NULL)
5748 pPrevItem->pNext = VMA_NULL;
5750 m_pBack = pPrevItem;
5751 m_ItemAllocator.Free(pBackItem);
5755 template<
typename T>
5756 void VmaRawList<T>::PopFront()
5758 VMA_HEAVY_ASSERT(m_Count > 0);
5759 ItemType*
const pFrontItem = m_pFront;
5760 ItemType*
const pNextItem = pFrontItem->pNext;
5761 if(pNextItem != VMA_NULL)
5763 pNextItem->pPrev = VMA_NULL;
5765 m_pFront = pNextItem;
5766 m_ItemAllocator.Free(pFrontItem);
// Remove(): unlink an arbitrary node, fixing front/back when at either end.
5770 template<
typename T>
5771 void VmaRawList<T>::Remove(ItemType* pItem)
5773 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
5774 VMA_HEAVY_ASSERT(m_Count > 0);
5776 if(pItem->pPrev != VMA_NULL)
5778 pItem->pPrev->pNext = pItem->pNext;
5782 VMA_HEAVY_ASSERT(m_pFront == pItem);
5783 m_pFront = pItem->pNext;
5786 if(pItem->pNext != VMA_NULL)
5788 pItem->pNext->pPrev = pItem->pPrev;
5792 VMA_HEAVY_ASSERT(m_pBack == pItem);
5793 m_pBack = pItem->pPrev;
5796 m_ItemAllocator.Free(pItem);
// InsertBefore()/InsertAfter(): allocate and splice a node next to pItem.
5800 template<
typename T>
5801 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
5803 if(pItem != VMA_NULL)
5805 ItemType*
const prevItem = pItem->pPrev;
5806 ItemType*
const newItem = m_ItemAllocator.Alloc();
5807 newItem->pPrev = prevItem;
5808 newItem->pNext = pItem;
5809 pItem->pPrev = newItem;
5810 if(prevItem != VMA_NULL)
5812 prevItem->pNext = newItem;
5816 VMA_HEAVY_ASSERT(m_pFront == pItem);
5826 template<
typename T>
5827 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
5829 if(pItem != VMA_NULL)
5831 ItemType*
const nextItem = pItem->pNext;
5832 ItemType*
const newItem = m_ItemAllocator.Alloc();
5833 newItem->pNext = nextItem;
5834 newItem->pPrev = pItem;
5835 pItem->pNext = newItem;
5836 if(nextItem != VMA_NULL)
5838 nextItem->pPrev = newItem;
5842 VMA_HEAVY_ASSERT(m_pBack == pItem);
// Value-taking insert overloads: splice a node, then copy-assign the payload.
5852 template<
typename T>
5853 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
5855 ItemType*
const newItem = InsertBefore(pItem);
5856 newItem->Value = value;
5860 template<
typename T>
5861 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
5863 ItemType*
const newItem = InsertAfter(pItem);
5864 newItem->Value = value;
// NOTE(review): garbled extraction of VmaList — fused line numbers; the class
// header, iterator class header, `return *this;` lines, increments inside the
// postfix operators and many braces are missing. Restore from intact upstream
// source.
// Visible intent: STL-flavored wrapper over VmaRawList providing (const_)
// iterator types; decrement from end() steps to Back().
5868 template<
typename T,
typename AllocatorT>
5871 VMA_CLASS_NO_COPY(VmaList)
5882 T& operator*()
const
5884 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5885 return m_pItem->Value;
5887 T* operator->()
const
5889 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5890 return &m_pItem->Value;
5893 iterator& operator++()
5895 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5896 m_pItem = m_pItem->pNext;
// Decrement of end() (m_pItem == VMA_NULL) moves to the list's back node.
5899 iterator& operator--()
5901 if(m_pItem != VMA_NULL)
5903 m_pItem = m_pItem->pPrev;
5907 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5908 m_pItem = m_pList->Back();
5913 iterator operator++(
int)
5915 iterator result = *
this;
5919 iterator operator--(
int)
5921 iterator result = *
this;
5926 bool operator==(
const iterator& rhs)
const
5928 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5929 return m_pItem == rhs.m_pItem;
5931 bool operator!=(
const iterator& rhs)
const
5933 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5934 return m_pItem != rhs.m_pItem;
5938 VmaRawList<T>* m_pList;
5939 VmaListItem<T>* m_pItem;
5941 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
5947 friend class VmaList<T, AllocatorT>;
// const_iterator mirrors iterator over const list/item pointers and is
// implicitly constructible from iterator.
5950 class const_iterator
5959 const_iterator(
const iterator& src) :
5960 m_pList(src.m_pList),
5961 m_pItem(src.m_pItem)
5965 const T& operator*()
const
5967 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5968 return m_pItem->Value;
5970 const T* operator->()
const
5972 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5973 return &m_pItem->Value;
5976 const_iterator& operator++()
5978 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5979 m_pItem = m_pItem->pNext;
5982 const_iterator& operator--()
5984 if(m_pItem != VMA_NULL)
5986 m_pItem = m_pItem->pPrev;
5990 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5991 m_pItem = m_pList->Back();
5996 const_iterator operator++(
int)
5998 const_iterator result = *
this;
6002 const_iterator operator--(
int)
6004 const_iterator result = *
this;
6009 bool operator==(
const const_iterator& rhs)
const
6011 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
6012 return m_pItem == rhs.m_pItem;
6014 bool operator!=(
const const_iterator& rhs)
const
6016 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
6017 return m_pItem != rhs.m_pItem;
6021 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
6027 const VmaRawList<T>* m_pList;
6028 const VmaListItem<T>* m_pItem;
6030 friend class VmaList<T, AllocatorT>;
// Public VmaList surface: thin forwarding onto the raw list.
6033 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
6035 bool empty()
const {
return m_RawList.IsEmpty(); }
6036 size_t size()
const {
return m_RawList.GetCount(); }
6038 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
6039 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
6041 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
6042 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
6044 void clear() { m_RawList.Clear(); }
6045 void push_back(
const T& value) { m_RawList.PushBack(value); }
6046 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
6047 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
6050 VmaRawList<T> m_RawList;
// NOTE(review): garbled extraction of VmaIntrusiveLinkedList — fused line
// numbers; `return *this;`, RemoveAll()'s loop body tail, m_Front/m_Back
// updates, count updates and braces are missing. Restore from intact
// upstream source.
// Visible intent: intrusive doubly linked list; node links live inside the
// items themselves and are reached through the ItemTypeTraits accessors.
6069 template<
typename ItemTypeTraits>
6070 class VmaIntrusiveLinkedList
6073 typedef typename ItemTypeTraits::ItemType ItemType;
6074 static ItemType* GetPrev(
const ItemType* item) {
return ItemTypeTraits::GetPrev(item); }
6075 static ItemType* GetNext(
const ItemType* item) {
return ItemTypeTraits::GetNext(item); }
// Move-only: copy construction/assignment are deleted; moves steal the links.
6077 VmaIntrusiveLinkedList() { }
6078 VmaIntrusiveLinkedList(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
6079 VmaIntrusiveLinkedList(VmaIntrusiveLinkedList<ItemTypeTraits>&& src) :
6080 m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count)
6082 src.m_Front = src.m_Back = VMA_NULL;
// Destructor asserts the list was emptied first — it does not own the items.
6085 ~VmaIntrusiveLinkedList()
6087 VMA_HEAVY_ASSERT(IsEmpty());
6089 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
6090 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(VmaIntrusiveLinkedList<ItemTypeTraits>&& src)
6094 VMA_HEAVY_ASSERT(IsEmpty());
6095 m_Front = src.m_Front;
6096 m_Back = src.m_Back;
6097 m_Count = src.m_Count;
6098 src.m_Front = src.m_Back = VMA_NULL;
// RemoveAll(): walk from the back, nulling each item's links.
6107 ItemType* item = m_Back;
6108 while(item != VMA_NULL)
6110 ItemType*
const prevItem = ItemTypeTraits::AccessPrev(item);
6111 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
6112 ItemTypeTraits::AccessNext(item) = VMA_NULL;
6120 size_t GetCount()
const {
return m_Count; }
6121 bool IsEmpty()
const {
return m_Count == 0; }
6122 ItemType* Front() {
return m_Front; }
6123 const ItemType* Front()
const {
return m_Front; }
6124 ItemType* Back() {
return m_Back; }
6125 const ItemType* Back()
const {
return m_Back; }
// Push/insert/remove preconditions: a pushed item must not already be linked.
6126 void PushBack(ItemType* item)
6128 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
6137 ItemTypeTraits::AccessPrev(item) = m_Back;
6138 ItemTypeTraits::AccessNext(m_Back) = item;
6143 void PushFront(ItemType* item)
6145 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
6154 ItemTypeTraits::AccessNext(item) = m_Front;
6155 ItemTypeTraits::AccessPrev(m_Front) = item;
6162 VMA_HEAVY_ASSERT(m_Count > 0);
6163 ItemType*
const backItem = m_Back;
6164 ItemType*
const prevItem = ItemTypeTraits::GetPrev(backItem);
6165 if(prevItem != VMA_NULL)
6167 ItemTypeTraits::AccessNext(prevItem) = VMA_NULL;
6171 ItemTypeTraits::AccessPrev(backItem) = VMA_NULL;
6172 ItemTypeTraits::AccessNext(backItem) = VMA_NULL;
6175 ItemType* PopFront()
6177 VMA_HEAVY_ASSERT(m_Count > 0);
6178 ItemType*
const frontItem = m_Front;
6179 ItemType*
const nextItem = ItemTypeTraits::GetNext(frontItem);
6180 if(nextItem != VMA_NULL)
6182 ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL;
6186 ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL;
6187 ItemTypeTraits::AccessNext(frontItem) = VMA_NULL;
// InsertBefore/InsertAfter splice newItem next to existingItem; a null
// existingItem falls through to PushBack/PushFront respectively.
6192 void InsertBefore(ItemType* existingItem, ItemType* newItem)
6194 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
6195 if(existingItem != VMA_NULL)
6197 ItemType*
const prevItem = ItemTypeTraits::GetPrev(existingItem);
6198 ItemTypeTraits::AccessPrev(newItem) = prevItem;
6199 ItemTypeTraits::AccessNext(newItem) = existingItem;
6200 ItemTypeTraits::AccessPrev(existingItem) = newItem;
6201 if(prevItem != VMA_NULL)
6203 ItemTypeTraits::AccessNext(prevItem) = newItem;
6207 VMA_HEAVY_ASSERT(m_Front == existingItem);
6216 void InsertAfter(ItemType* existingItem, ItemType* newItem)
6218 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
6219 if(existingItem != VMA_NULL)
6221 ItemType*
const nextItem = ItemTypeTraits::GetNext(existingItem);
6222 ItemTypeTraits::AccessNext(newItem) = nextItem;
6223 ItemTypeTraits::AccessPrev(newItem) = existingItem;
6224 ItemTypeTraits::AccessNext(existingItem) = newItem;
6225 if(nextItem != VMA_NULL)
6227 ItemTypeTraits::AccessPrev(nextItem) = newItem;
6231 VMA_HEAVY_ASSERT(m_Back == existingItem);
6237 return PushFront(newItem);
6239 void Remove(ItemType* item)
6241 VMA_HEAVY_ASSERT(item != VMA_NULL && m_Count > 0);
6242 if(ItemTypeTraits::GetPrev(item) != VMA_NULL)
6244 ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item);
6248 VMA_HEAVY_ASSERT(m_Front == item);
6249 m_Front = ItemTypeTraits::GetNext(item);
6252 if(ItemTypeTraits::GetNext(item) != VMA_NULL)
6254 ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item);
6258 VMA_HEAVY_ASSERT(m_Back == item);
6259 m_Back = ItemTypeTraits::GetPrev(item);
6261 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
6262 ItemTypeTraits::AccessNext(item) = VMA_NULL;
6266 ItemType* m_Front = VMA_NULL;
6267 ItemType* m_Back = VMA_NULL;
// NOTE(review): garbled extraction — fused line numbers; the VmaPair struct
// header and its first/second members, the VmaMap class header, the #else/
// #endif structure and access specifiers are missing. Restore from intact
// upstream source.
// Visible intent: when not using std::unordered_map, VmaMap is a sorted
// VmaVector of VmaPair entries searched by binary search.
6277 #if VMA_USE_STL_UNORDERED_MAP
6279 #define VmaPair std::pair
6281 #define VMA_MAP_TYPE(KeyT, ValueT) \
6282 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
6286 template<
typename T1,
typename T2>
6292 VmaPair() : first(), second() { }
6293 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
6299 template<
typename KeyT,
typename ValueT>
6303 typedef VmaPair<KeyT, ValueT> PairType;
6304 typedef PairType* iterator;
6306 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
6308 iterator begin() {
return m_Vector.begin(); }
6309 iterator end() {
return m_Vector.end(); }
6311 void insert(
const PairType& pair);
6312 iterator find(
const KeyT& key);
6313 void erase(iterator it);
6316 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
6319 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
6321 template<
typename FirstT,
typename SecondT>
6322 struct VmaPairFirstLess
6324 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const
6326 return lhs.first < rhs.first;
6328 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const
6330 return lhs.first < rhsFirst;
// NOTE(review): garbled extraction of VmaMap::insert/find — fused line
// numbers; the first/third arguments to VmaBinaryFindFirstNotLess, the
// `return it;` branch and braces are missing. Restore from intact upstream
// source.
6334 template<
typename KeyT,
typename ValueT>
6335 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
// Binary search for the insertion point keeping m_Vector sorted by key.
6337 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
6339 m_Vector.data() + m_Vector.size(),
6341 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
6342 VmaVectorInsert(m_Vector, indexToInsert, pair);
6345 template<
typename KeyT,
typename ValueT>
6346 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
6348 PairType* it = VmaBinaryFindFirstNotLess(
6350 m_Vector.data() + m_Vector.size(),
6352 VmaPairFirstLess<KeyT, ValueT>());
// Lower-bound hit only counts when the key matches exactly; otherwise end().
6353 if((it != m_Vector.end()) && (it->first == key))
6359 return m_Vector.end();
6363 template<
typename KeyT,
typename ValueT>
6364 void VmaMap<KeyT, ValueT>::erase(iterator it)
6366 VmaVectorRemove(m_Vector, it - m_Vector.begin());
// NOTE(review): garbled extraction of VmaAllocation_T — fused line numbers;
// the struct header's brace, several members (m_Size, m_MapCount, union
// keyword, access specifiers), method bodies' braces and whole statements are
// missing. Restore from intact upstream source before editing.
// Visible intent: the internal representation of a VmaAllocation — either a
// suballocation inside a VmaDeviceMemoryBlock (BlockAllocation) or a
// dedicated VkDeviceMemory (DedicatedAllocation), discriminated by m_Type.
6375 class VmaDeviceMemoryBlock;
6377 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
6379 struct VmaAllocation_T
// High bit of m_MapCount marks a persistently mapped allocation.
6382 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
6386 FLAG_USER_DATA_STRING = 0x01,
6390 enum ALLOCATION_TYPE
6392 ALLOCATION_TYPE_NONE,
6393 ALLOCATION_TYPE_BLOCK,
6394 ALLOCATION_TYPE_DEDICATED,
6401 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
6404 m_pUserData{VMA_NULL},
6405 m_LastUseFrameIndex{currentFrameIndex},
6406 m_MemoryTypeIndex{0},
6407 m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
6408 m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
6410 m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
6412 #if VMA_STATS_STRING_ENABLED
6413 m_CreationFrameIndex = currentFrameIndex;
6414 m_BufferImageUsage = 0;
// Destructor fragment: allocation must be unmapped and user data cleared.
6420 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
6423 VMA_ASSERT(m_pUserData == VMA_NULL);
// Initializes this object as a block suballocation (must be TYPE_NONE first).
6426 void InitBlockAllocation(
6427 VmaDeviceMemoryBlock* block,
6428 VkDeviceSize offset,
6429 VkDeviceSize alignment,
6431 uint32_t memoryTypeIndex,
6432 VmaSuballocationType suballocationType,
6436 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6437 VMA_ASSERT(block != VMA_NULL);
6438 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6439 m_Alignment = alignment;
6441 m_MemoryTypeIndex = memoryTypeIndex;
6442 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6443 m_SuballocationType = (uint8_t)suballocationType;
6444 m_BlockAllocation.m_Block = block;
6445 m_BlockAllocation.m_Offset = offset;
6446 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Fragment of InitLost(): marks the allocation as a lost block allocation.
6451 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6452 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
6453 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6454 m_MemoryTypeIndex = 0;
6455 m_BlockAllocation.m_Block = VMA_NULL;
6456 m_BlockAllocation.m_Offset = 0;
6457 m_BlockAllocation.m_CanBecomeLost =
true;
6460 void ChangeBlockAllocation(
6462 VmaDeviceMemoryBlock* block,
6463 VkDeviceSize offset);
6465 void ChangeOffset(VkDeviceSize newOffset);
// Initializes this object as a dedicated (own VkDeviceMemory) allocation.
6468 void InitDedicatedAllocation(
6469 uint32_t memoryTypeIndex,
6470 VkDeviceMemory hMemory,
6471 VmaSuballocationType suballocationType,
6475 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6476 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
6477 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
6480 m_MemoryTypeIndex = memoryTypeIndex;
6481 m_SuballocationType = (uint8_t)suballocationType;
6482 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6483 m_DedicatedAllocation.m_hMemory = hMemory;
6484 m_DedicatedAllocation.m_pMappedData = pMappedData;
6485 m_DedicatedAllocation.m_Prev = VMA_NULL;
6486 m_DedicatedAllocation.m_Next = VMA_NULL;
// Simple accessors over the packed state.
6489 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
6490 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
6491 VkDeviceSize GetSize()
const {
return m_Size; }
6492 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
6493 void* GetUserData()
const {
return m_pUserData; }
6494 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
6495 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
6497 VmaDeviceMemoryBlock* GetBlock()
const
6499 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6500 return m_BlockAllocation.m_Block;
6502 VkDeviceSize GetOffset()
const;
6503 VkDeviceMemory GetMemory()
const;
6504 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
6505 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
6506 void* GetMappedData()
const;
6507 bool CanBecomeLost()
const;
// Lost-allocation tracking uses an atomic frame index; CAS-based update.
6509 uint32_t GetLastUseFrameIndex()
const
6511 return m_LastUseFrameIndex.load()
6513 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
6515 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
6525 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6527 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
6529 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
6540 void BlockAllocMap();
6541 void BlockAllocUnmap();
6542 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
6545 #if VMA_STATS_STRING_ENABLED
6546 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
6547 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
6549 void InitBufferImageUsage(uint32_t bufferImageUsage)
6551 VMA_ASSERT(m_BufferImageUsage == 0);
6552 m_BufferImageUsage = bufferImageUsage;
6555 void PrintParameters(
class VmaJsonWriter& json)
const;
// Data members (several lines lost; m_Size/m_MapCount/m_Flags among them).
6559 VkDeviceSize m_Alignment;
6560 VkDeviceSize m_Size;
6562 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
6563 uint32_t m_MemoryTypeIndex;
6565 uint8_t m_SuballocationType;
6572 struct BlockAllocation
6574 VmaDeviceMemoryBlock* m_Block;
6575 VkDeviceSize m_Offset;
6576 bool m_CanBecomeLost;
6580 struct DedicatedAllocation
6582 VkDeviceMemory m_hMemory;
6583 void* m_pMappedData;
6584 VmaAllocation_T* m_Prev;
6585 VmaAllocation_T* m_Next;
// Presumably these two live in an anonymous union (only one is active per
// m_Type) — the `union` line was lost in extraction; TODO confirm upstream.
6591 BlockAllocation m_BlockAllocation;
6593 DedicatedAllocation m_DedicatedAllocation;
6596 #if VMA_STATS_STRING_ENABLED
6597 uint32_t m_CreationFrameIndex;
6598 uint32_t m_BufferImageUsage;
6603 friend struct VmaDedicatedAllocationListItemTraits;
6606 struct VmaDedicatedAllocationListItemTraits
6608 typedef VmaAllocation_T ItemType;
6609 static ItemType* GetPrev(
const ItemType* item)
6611 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6612 return item->m_DedicatedAllocation.m_Prev;
6614 static ItemType* GetNext(
const ItemType* item)
6616 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6617 return item->m_DedicatedAllocation.m_Next;
6619 static ItemType*& AccessPrev(ItemType* item)
6621 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6622 return item->m_DedicatedAllocation.m_Prev;
6624 static ItemType*& AccessNext(ItemType* item){
6625 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6626 return item->m_DedicatedAllocation.m_Next;
// NOTE(review): garbled fragment of VmaSuballocation — fused line numbers;
// the braces and at least one member (between offset and type) are missing.
// Restore from intact upstream source.
6634 struct VmaSuballocation
6636 VkDeviceSize offset;
6639 VmaSuballocationType type;
6643 struct VmaSuballocationOffsetLess
6645 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6647 return lhs.offset < rhs.offset;
6650 struct VmaSuballocationOffsetGreater
6652 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6654 return lhs.offset > rhs.offset;
// NOTE(review): garbled extraction — fused line numbers; the enumerators of
// VmaAllocationRequestType, struct braces and at least one member are
// missing. Restore from intact upstream source.
// Visible intent: the result of a suballocation search; CalcCost() weighs how
// expensive satisfying the request would be in terms of allocations that
// would have to be made "lost".
6658 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
6661 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
6663 enum class VmaAllocationRequestType
6685 struct VmaAllocationRequest
6687 VkDeviceSize offset;
6688 VkDeviceSize sumFreeSize;
6689 VkDeviceSize sumItemSize;
6690 VmaSuballocationList::iterator item;
6691 size_t itemsToMakeLostCount;
6693 VmaAllocationRequestType type;
6695 VkDeviceSize CalcCost()
const
// Cost model: bytes of occupied items plus a fixed penalty per lost item.
6697 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// NOTE(review): garbled extraction of the VmaBlockMetadata abstract base —
// fused line numbers; the class header line/braces, constructor, Alloc()
// declaration head, and the VMA_VALIDATE macro's tail are missing. Restore
// from intact upstream source.
// Visible intent: pure-virtual interface for per-block suballocation
// bookkeeping; concrete strategies (Generic/Linear/Buddy) derive from it.
6705 class VmaBlockMetadata
6709 virtual ~VmaBlockMetadata() { }
6710 virtual void Init(VkDeviceSize size) { m_Size = size; }
// Validation/statistics interface.
6713 virtual bool Validate()
const = 0;
6714 VkDeviceSize GetSize()
const {
return m_Size; }
6715 virtual size_t GetAllocationCount()
const = 0;
6716 virtual VkDeviceSize GetSumFreeSize()
const = 0;
6717 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
6719 virtual bool IsEmpty()
const = 0;
6721 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
6723 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
6725 #if VMA_STATS_STRING_ENABLED
6726 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Allocation interface: search for space, commit, and lost-allocation logic.
6732 virtual bool CreateAllocationRequest(
6733 uint32_t currentFrameIndex,
6734 uint32_t frameInUseCount,
6735 VkDeviceSize bufferImageGranularity,
6736 VkDeviceSize allocSize,
6737 VkDeviceSize allocAlignment,
6739 VmaSuballocationType allocType,
6740 bool canMakeOtherLost,
6743 VmaAllocationRequest* pAllocationRequest) = 0;
6745 virtual bool MakeRequestedAllocationsLost(
6746 uint32_t currentFrameIndex,
6747 uint32_t frameInUseCount,
6748 VmaAllocationRequest* pAllocationRequest) = 0;
6750 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
6752 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
6756 const VmaAllocationRequest& request,
6757 VmaSuballocationType type,
6758 VkDeviceSize allocSize,
6763 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
6766 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
// JSON-dump helpers shared by the derived classes.
6768 #if VMA_STATS_STRING_ENABLED
6769 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6770 VkDeviceSize unusedBytes,
6771 size_t allocationCount,
6772 size_t unusedRangeCount)
const;
6773 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6774 VkDeviceSize offset,
6776 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6777 VkDeviceSize offset,
6778 VkDeviceSize size)
const;
6779 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
6783 VkDeviceSize m_Size;
6784 const VkAllocationCallbacks* m_pAllocationCallbacks;
// VMA_VALIDATE: assert-and-return-false helper used by Validate() overrides.
6787 #define VMA_VALIDATE(cond) do { if(!(cond)) { \
6788 VMA_ASSERT(0 && "Validation failed: " #cond); \
// NOTE(review): garbled extraction of VmaBlockMetadata_Generic — fused line
// numbers; braces, the constructor, Alloc() declaration head and several
// members are missing. Restore from intact upstream source.
// Visible intent: the default metadata strategy — a suballocation list plus
// a by-size index of free ranges (m_FreeSuballocationsBySize).
6792 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
6794 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
6797 virtual ~VmaBlockMetadata_Generic();
6798 virtual void Init(VkDeviceSize size);
6800 virtual bool Validate()
const;
// Allocation count = total suballocations minus the free ones.
6801 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
6802 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
6803 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6804 virtual bool IsEmpty()
const;
6806 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6807 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6809 #if VMA_STATS_STRING_ENABLED
6810 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
6813 virtual bool CreateAllocationRequest(
6814 uint32_t currentFrameIndex,
6815 uint32_t frameInUseCount,
6816 VkDeviceSize bufferImageGranularity,
6817 VkDeviceSize allocSize,
6818 VkDeviceSize allocAlignment,
6820 VmaSuballocationType allocType,
6821 bool canMakeOtherLost,
6823 VmaAllocationRequest* pAllocationRequest);
6825 virtual bool MakeRequestedAllocationsLost(
6826 uint32_t currentFrameIndex,
6827 uint32_t frameInUseCount,
6828 VmaAllocationRequest* pAllocationRequest);
6830 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6832 virtual VkResult CheckCorruption(
const void* pBlockData);
6835 const VmaAllocationRequest& request,
6836 VmaSuballocationType type,
6837 VkDeviceSize allocSize,
6841 virtual void FreeAtOffset(VkDeviceSize offset);
6846 bool IsBufferImageGranularityConflictPossible(
6847 VkDeviceSize bufferImageGranularity,
6848 VmaSuballocationType& inOutPrevSuballocType)
const;
// Defragmentation algorithms need direct access to the internals.
6851 friend class VmaDefragmentationAlgorithm_Generic;
6852 friend class VmaDefragmentationAlgorithm_Fast;
6854 uint32_t m_FreeCount;
6855 VkDeviceSize m_SumFreeSize;
6856 VmaSuballocationList m_Suballocations;
6859 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
6861 bool ValidateFreeSuballocationList()
const;
// Core fit-check used by CreateAllocationRequest, including the
// "make other allocations lost" accounting outputs.
6865 bool CheckAllocation(
6866 uint32_t currentFrameIndex,
6867 uint32_t frameInUseCount,
6868 VkDeviceSize bufferImageGranularity,
6869 VkDeviceSize allocSize,
6870 VkDeviceSize allocAlignment,
6871 VmaSuballocationType allocType,
6872 VmaSuballocationList::const_iterator suballocItem,
6873 bool canMakeOtherLost,
6874 VkDeviceSize* pOffset,
6875 size_t* itemsToMakeLostCount,
6876 VkDeviceSize* pSumFreeSize,
6877 VkDeviceSize* pSumItemSize)
const;
6879 void MergeFreeWithNext(VmaSuballocationList::iterator item);
6883 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
6886 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
6889 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Linear metadata implementation: suballocations kept in two vectors that
// can act as a ring buffer or a double stack (see SECOND_VECTOR_MODE).
// NOTE(review): mangled extraction — lines missing; code left untouched.
6970 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
6972 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
6975 virtual ~VmaBlockMetadata_Linear();
6976 virtual void Init(VkDeviceSize size);
6978 virtual bool Validate()
const;
6979 virtual size_t GetAllocationCount()
const;
6980 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
6981 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6982 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
6984 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6985 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6987 #if VMA_STATS_STRING_ENABLED
6988 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
6991 virtual bool CreateAllocationRequest(
6992 uint32_t currentFrameIndex,
6993 uint32_t frameInUseCount,
6994 VkDeviceSize bufferImageGranularity,
6995 VkDeviceSize allocSize,
6996 VkDeviceSize allocAlignment,
6998 VmaSuballocationType allocType,
6999 bool canMakeOtherLost,
7001 VmaAllocationRequest* pAllocationRequest);
7003 virtual bool MakeRequestedAllocationsLost(
7004 uint32_t currentFrameIndex,
7005 uint32_t frameInUseCount,
7006 VmaAllocationRequest* pAllocationRequest);
7008 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
7010 virtual VkResult CheckCorruption(
const void* pBlockData);
// (Alloc() declaration truncated — parameters only.)
7013 const VmaAllocationRequest& request,
7014 VmaSuballocationType type,
7015 VkDeviceSize allocSize,
7019 virtual void FreeAtOffset(VkDeviceSize offset);
7029 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
// How the "second" vector is being used relative to the first.
7031 enum SECOND_VECTOR_MODE
7033 SECOND_VECTOR_EMPTY,
7038 SECOND_VECTOR_RING_BUFFER,
7044 SECOND_VECTOR_DOUBLE_STACK,
7047 VkDeviceSize m_SumFreeSize;
// Double-buffered storage: m_1stVectorIndex selects which of the two
// vectors is currently "1st"; the accessors below resolve it.
7048 SuballocationVectorType m_Suballocations0, m_Suballocations1;
7049 uint32_t m_1stVectorIndex;
7050 SECOND_VECTOR_MODE m_2ndVectorMode;
7052 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7053 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
7054 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7055 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Counts of null (freed) items used to decide when to compact the vectors.
7058 size_t m_1stNullItemsBeginCount;
7060 size_t m_1stNullItemsMiddleCount;
7062 size_t m_2ndNullItemsCount;
7064 bool ShouldCompact1st()
const;
7065 void CleanupAfterFree();
// Placement search split by direction (lower/upper address of the block).
7067 bool CreateAllocationRequest_LowerAddress(
7068 uint32_t currentFrameIndex,
7069 uint32_t frameInUseCount,
7070 VkDeviceSize bufferImageGranularity,
7071 VkDeviceSize allocSize,
7072 VkDeviceSize allocAlignment,
7073 VmaSuballocationType allocType,
7074 bool canMakeOtherLost,
7076 VmaAllocationRequest* pAllocationRequest);
7077 bool CreateAllocationRequest_UpperAddress(
7078 uint32_t currentFrameIndex,
7079 uint32_t frameInUseCount,
7080 VkDeviceSize bufferImageGranularity,
7081 VkDeviceSize allocSize,
7082 VkDeviceSize allocAlignment,
7083 VmaSuballocationType allocType,
7084 bool canMakeOtherLost,
7086 VmaAllocationRequest* pAllocationRequest);
// Buddy-allocator metadata: manages the block as a binary tree of power-of-two
// nodes with per-level free lists. Size not a power of two leaves an
// "unusable" tail (GetUnusableSize()).
// NOTE(review): mangled extraction — lines missing; code left untouched.
7100 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
7102 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
7105 virtual ~VmaBlockMetadata_Buddy();
7106 virtual void Init(VkDeviceSize size);
7108 virtual bool Validate()
const;
7109 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
// Unusable tail is reported as "free" so totals stay consistent.
7110 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
7111 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
// Empty iff the root node is one whole free range.
7112 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
7114 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
7115 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
7117 #if VMA_STATS_STRING_ENABLED
7118 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
7121 virtual bool CreateAllocationRequest(
7122 uint32_t currentFrameIndex,
7123 uint32_t frameInUseCount,
7124 VkDeviceSize bufferImageGranularity,
7125 VkDeviceSize allocSize,
7126 VkDeviceSize allocAlignment,
7128 VmaSuballocationType allocType,
7129 bool canMakeOtherLost,
7131 VmaAllocationRequest* pAllocationRequest);
7133 virtual bool MakeRequestedAllocationsLost(
7134 uint32_t currentFrameIndex,
7135 uint32_t frameInUseCount,
7136 VmaAllocationRequest* pAllocationRequest);
7138 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Corruption detection is not supported by the buddy algorithm.
7140 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
// (Alloc() declaration truncated — parameters only.)
7143 const VmaAllocationRequest& request,
7144 VmaSuballocationType type,
7145 VkDeviceSize allocSize,
// Both Free overloads funnel into the private FreeAtOffset(alloc, offset).
7148 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
7149 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
// Tree shape limits: smallest node and maximum depth.
7152 static const VkDeviceSize MIN_NODE_SIZE = 32;
7153 static const size_t MAX_LEVELS = 30;
// Accumulators used while validating the whole tree.
7155 struct ValidationContext
7157 size_t calculatedAllocationCount;
7158 size_t calculatedFreeCount;
7159 VkDeviceSize calculatedSumFreeSize;
7161 ValidationContext() :
7162 calculatedAllocationCount(0),
7163 calculatedFreeCount(0),
7164 calculatedSumFreeSize(0) { }
// (Node struct is truncated in this view — only fragments remain.)
7169 VkDeviceSize offset;
7199 VkDeviceSize m_UsableSize;
7200 uint32_t m_LevelCount;
// Per-level free list heads (struct body truncated above).
7206 } m_FreeList[MAX_LEVELS];
7208 size_t m_AllocationCount;
7212 VkDeviceSize m_SumFreeSize;
// Bytes beyond the largest power-of-two that fits in the block.
7214 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
7215 void DeleteNode(Node* node);
7216 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
7217 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
// Each level halves the node size.
7218 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
7220 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
7221 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
7225 void AddToFreeListFront(uint32_t level, Node* node);
7229 void RemoveFromFreeList(uint32_t level, Node* node);
7231 #if VMA_STATS_STRING_ENABLED
7232 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// Wraps one VkDeviceMemory object plus the metadata object (m_pMetadata)
// that tracks suballocations within it; also handles mapping with a
// reference count and magic-value corruption checks.
// NOTE(review): mangled extraction — lines missing; code left untouched.
7242 class VmaDeviceMemoryBlock
7244 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
7246 VmaBlockMetadata* m_pMetadata;
// Destructor only asserts invariants: must be unmapped and Destroy()'d
// (m_hMemory already released) before deletion.
7250 ~VmaDeviceMemoryBlock()
7252 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
7253 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// (Init() declaration truncated — remaining parameters below.)
7260 uint32_t newMemoryTypeIndex,
7261 VkDeviceMemory newMemory,
7262 VkDeviceSize newSize,
7264 uint32_t algorithm);
7268 VmaPool GetParentPool()
const {
return m_hParentPool; }
7269 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
7270 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7271 uint32_t GetId()
const {
return m_Id; }
7272 void* GetMappedData()
const {
return m_pMappedData; }
7275 bool Validate()
const;
// Maps the memory `count` times (reference-counted via m_MapCount).
7280 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Corruption detection: write/verify magic values around an allocation.
7283 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
7284 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
// (Bind* declarations truncated — fragments only.)
7286 VkResult BindBufferMemory(
7289 VkDeviceSize allocationLocalOffset,
7292 VkResult BindImageMemory(
7295 VkDeviceSize allocationLocalOffset,
7301 uint32_t m_MemoryTypeIndex;
7303 VkDeviceMemory m_hMemory;
// Map reference count and cached mapped pointer (non-zero/non-null only
// while mapped).
7311 uint32_t m_MapCount;
7312 void* m_pMappedData;
// One planned defragmentation move: copy an allocation from
// (srcBlockIndex, srcOffset) to (dstBlockIndex, dstOffset).
// NOTE(review): struct body is incomplete in this extraction.
7315 struct VmaDefragmentationMove
7317 size_t srcBlockIndex;
7318 size_t dstBlockIndex;
7319 VkDeviceSize srcOffset;
7320 VkDeviceSize dstOffset;
7323 VmaDeviceMemoryBlock* pSrcBlock;
7324 VmaDeviceMemoryBlock* pDstBlock;
// Forward declaration; concrete algorithms are defined further below.
7327 class VmaDefragmentationAlgorithm;
// Sequence of VmaDeviceMemoryBlock's for one memory type: grows/shrinks the
// set of blocks between min/max counts, serves allocations from them, and
// drives per-vector defragmentation. Used both for default pools and for
// custom pools (IsCustomPool()).
// NOTE(review): mangled extraction — lines missing; code left untouched.
7335 struct VmaBlockVector
7337 VMA_CLASS_NO_COPY(VmaBlockVector)
// (Constructor truncated — remaining parameters below.)
7342 uint32_t memoryTypeIndex,
7343 VkDeviceSize preferredBlockSize,
7344 size_t minBlockCount,
7345 size_t maxBlockCount,
7346 VkDeviceSize bufferImageGranularity,
7347 uint32_t frameInUseCount,
7348 bool explicitBlockSize,
7351 VkDeviceSize minAllocationAlignment,
7352 void* pMemoryAllocateNext);
// Pre-creates m_MinBlockCount blocks.
7355 VkResult CreateMinBlocks();
7357 VmaAllocator GetAllocator()
const {
return m_hAllocator; }
7358 VmaPool GetParentPool()
const {
return m_hParentPool; }
// Custom pools have a parent pool handle; the default pools do not.
7359 bool IsCustomPool()
const {
return m_hParentPool != VMA_NULL; }
7360 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7361 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
7362 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
7363 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
7364 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
7369 bool IsCorruptionDetectionEnabled()
const;
// (Allocate() truncated — parameter fragments below.)
7372 uint32_t currentFrameIndex,
7374 VkDeviceSize alignment,
7376 VmaSuballocationType suballocType,
7377 size_t allocationCount,
7385 #if VMA_STATS_STRING_ENABLED
7386 void PrintDetailedMap(
class VmaJsonWriter& json);
7389 void MakePoolAllocationsLost(
7390 uint32_t currentFrameIndex,
7391 size_t* pLostAllocationCount);
7392 VkResult CheckCorruption();
// Defragmentation entry points (declarations truncated in places).
7396 class VmaBlockVectorDefragmentationContext* pCtx,
7398 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
7399 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
7400 VkCommandBuffer commandBuffer);
7401 void DefragmentationEnd(
7402 class VmaBlockVectorDefragmentationContext* pCtx,
7406 uint32_t ProcessDefragmentations(
7407 class VmaBlockVectorDefragmentationContext *pCtx,
7410 void CommitDefragmentations(
7411 class VmaBlockVectorDefragmentationContext *pCtx,
7417 size_t GetBlockCount()
const {
return m_Blocks.size(); }
7418 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
7419 size_t CalcAllocationCount()
const;
7420 bool IsBufferImageGranularityConflictPossible()
const;
7423 friend class VmaDefragmentationAlgorithm_Generic;
// Configuration fixed at construction time (all const).
7427 const uint32_t m_MemoryTypeIndex;
7428 const VkDeviceSize m_PreferredBlockSize;
7429 const size_t m_MinBlockCount;
7430 const size_t m_MaxBlockCount;
7431 const VkDeviceSize m_BufferImageGranularity;
7432 const uint32_t m_FrameInUseCount;
7433 const bool m_ExplicitBlockSize;
7434 const uint32_t m_Algorithm;
7435 const float m_Priority;
7436 const VkDeviceSize m_MinAllocationAlignment;
// Optional pNext chain appended to VkMemoryAllocateInfo.
7437 void*
const m_pMemoryAllocateNext;
// Protects m_Blocks and related mutable state below.
7438 VMA_RW_MUTEX m_Mutex;
7442 bool m_HasEmptyBlock;
7444 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
7445 uint32_t m_NextBlockId;
7447 VkDeviceSize CalcMaxBlockSize()
const;
7450 void Remove(VmaDeviceMemoryBlock* pBlock);
7454 void IncrementallySortBlocks();
// (AllocatePage/AllocateFromBlock truncated — parameter fragments.)
7456 VkResult AllocatePage(
7457 uint32_t currentFrameIndex,
7459 VkDeviceSize alignment,
7461 VmaSuballocationType suballocType,
7465 VkResult AllocateFromBlock(
7466 VmaDeviceMemoryBlock* pBlock,
7467 uint32_t currentFrameIndex,
7469 VkDeviceSize alignment,
7472 VmaSuballocationType suballocType,
7476 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// CPU path: map + memcpy moves; GPU path: record vkCmdCopyBuffer's.
7479 void ApplyDefragmentationMovesCpu(
7480 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7481 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
7483 void ApplyDefragmentationMovesGpu(
7484 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7485 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7486 VkCommandBuffer commandBuffer);
7494 void UpdateHasEmptyBlock();
// Interior of VmaPool_T (the class header line is missing from this view):
// a custom pool is essentially a named, id-tagged VmaBlockVector, linked
// into an intrusive list of pools via m_PrevPool/m_NextPool.
7499 VMA_CLASS_NO_COPY(VmaPool_T)
7501 VmaBlockVector m_BlockVector;
// (Constructor truncated — last parameter only.)
7506 VkDeviceSize preferredBlockSize);
7509 uint32_t GetId()
const {
return m_Id; }
// Id may be set exactly once (asserted to still be 0).
7510 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
7512 const char* GetName()
const {
return m_Name; }
7513 void SetName(
const char* pName);
7515 #if VMA_STATS_STRING_ENABLED
// Intrusive doubly-linked list hooks, accessed by VmaPoolListItemTraits.
7522 VmaPool_T* m_PrevPool = VMA_NULL;
7523 VmaPool_T* m_NextPool = VMA_NULL;
7524 friend struct VmaPoolListItemTraits;
// Traits adapter letting VmaIntrusiveLinkedList traverse VmaPool_T's
// m_PrevPool/m_NextPool hooks.
7527 struct VmaPoolListItemTraits
7529 typedef VmaPool_T ItemType;
7530 static ItemType* GetPrev(
const ItemType* item) {
return item->m_PrevPool; }
7531 static ItemType* GetNext(
const ItemType* item) {
return item->m_NextPool; }
// Mutable accessors used when (un)linking items.
7532 static ItemType*& AccessPrev(ItemType* item) {
return item->m_PrevPool; }
7533 static ItemType*& AccessNext(ItemType* item) {
return item->m_NextPool; }
// Abstract base for the defragmentation strategies (_Generic and _Fast
// below): collect allocations, produce a list of moves bounded by
// bytes/allocation budgets, and report what was moved.
// NOTE(review): mangled extraction — lines missing; code left untouched.
7543 class VmaDefragmentationAlgorithm
7545 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
7547 VmaDefragmentationAlgorithm(
7549 VmaBlockVector* pBlockVector,
7550 uint32_t currentFrameIndex) :
7551 m_hAllocator(hAllocator),
7552 m_pBlockVector(pBlockVector),
7553 m_CurrentFrameIndex(currentFrameIndex)
7556 virtual ~VmaDefragmentationAlgorithm()
// Register a single allocation, or everything in the block vector.
7560 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
7561 virtual void AddAll() = 0;
// Fills `moves`, constrained by the byte/count budgets.
7563 virtual VkResult Defragment(
7564 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7565 VkDeviceSize maxBytesToMove,
7566 uint32_t maxAllocationsToMove,
7569 virtual VkDeviceSize GetBytesMoved()
const = 0;
7570 virtual uint32_t GetAllocationsMoved()
const = 0;
7574 VmaBlockVector*
const m_pBlockVector;
7575 const uint32_t m_CurrentFrameIndex;
// Tracked allocation + caller's "was it moved" output flag.
7577 struct AllocationInfo
7580 VkBool32* m_pChanged;
7583 m_hAllocation(VK_NULL_HANDLE),
7584 m_pChanged(VMA_NULL)
7588 m_hAllocation(hAlloc),
7589 m_pChanged(pChanged)
// General-purpose defragmentation: per-block bookkeeping (BlockInfo), sorts
// candidate allocations and picks move destinations via the comparators
// defined below.
// NOTE(review): mangled extraction — lines missing; code left untouched.
7595 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
7597 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
7599 VmaDefragmentationAlgorithm_Generic(
7601 VmaBlockVector* pBlockVector,
7602 uint32_t currentFrameIndex,
7603 bool overlappingMoveSupported);
7604 virtual ~VmaDefragmentationAlgorithm_Generic();
7606 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7607 virtual void AddAll() { m_AllAllocations =
true; }
7609 virtual VkResult Defragment(
7610 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7611 VkDeviceSize maxBytesToMove,
7612 uint32_t maxAllocationsToMove,
7615 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7616 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7619 uint32_t m_AllocationCount;
7620 bool m_AllAllocations;
7622 VkDeviceSize m_BytesMoved;
7623 uint32_t m_AllocationsMoved;
// Comparators for sorting candidate allocations (largest first).
7625 struct AllocationInfoSizeGreater
7627 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7629 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
7633 struct AllocationInfoOffsetGreater
7635 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7637 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
// Per-block state: which allocations were registered and whether the
// block also holds allocations that cannot be moved.
7643 size_t m_OriginalBlockIndex;
7644 VmaDeviceMemoryBlock* m_pBlock;
7645 bool m_HasNonMovableAllocations;
7646 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
7648 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
7649 m_OriginalBlockIndex(SIZE_MAX),
7651 m_HasNonMovableAllocations(true),
7652 m_Allocations(pAllocationCallbacks)
// Non-movable iff the block holds more allocations than were registered.
7656 void CalcHasNonMovableAllocations()
7658 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
7659 const size_t defragmentAllocCount = m_Allocations.size();
7660 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
7663 void SortAllocationsBySizeDescending()
7665 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
7668 void SortAllocationsByOffsetDescending()
7670 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
// Orders BlockInfo pointers by underlying block address, and supports
// heterogeneous lookup against a raw block pointer.
7674 struct BlockPointerLess
7676 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const
7678 return pLhsBlockInfo->m_pBlock < pRhsBlock;
7680 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7682 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Destination preference: blocks with non-movable allocations first,
// then blocks with less free space (comparator tail truncated here).
7688 struct BlockInfoCompareMoveDestination
7690 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7692 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
7696 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
7700 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
7708 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
7709 BlockInfoVector m_Blocks;
// One pass of the algorithm; Defragment presumably loops over rounds.
7711 VkResult DefragmentRound(
7712 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7713 VkDeviceSize maxBytesToMove,
7714 uint32_t maxAllocationsToMove,
7715 bool freeOldAllocations);
7717 size_t CalcBlocksWithNonMovableCount()
const;
7719 static bool MoveMakesSense(
7720 size_t dstBlockIndex, VkDeviceSize dstOffset,
7721 size_t srcBlockIndex, VkDeviceSize srcOffset);
// Fast defragmentation: instead of per-allocation bookkeeping it tracks a
// small fixed set of free spaces (FreeSpaceDatabase, MAX_COUNT entries) and
// rewrites block metadata wholesale (Preprocess/PostprocessMetadata).
// NOTE(review): mangled extraction — lines missing; code left untouched.
7724 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
7726 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
7728 VmaDefragmentationAlgorithm_Fast(
7730 VmaBlockVector* pBlockVector,
7731 uint32_t currentFrameIndex,
7732 bool overlappingMoveSupported);
7733 virtual ~VmaDefragmentationAlgorithm_Fast();
// Individual allocations are only counted, not stored.
7735 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
7736 virtual void AddAll() { m_AllAllocations =
true; }
7738 virtual VkResult Defragment(
7739 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7740 VkDeviceSize maxBytesToMove,
7741 uint32_t maxAllocationsToMove,
7744 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7745 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7750 size_t origBlockIndex;
// Bounded registry of the largest known free ranges across blocks.
7753 class FreeSpaceDatabase
// (Constructor truncated: marks all slots empty via SIZE_MAX sentinel.)
7759 s.blockInfoIndex = SIZE_MAX;
7760 for(
size_t i = 0; i < MAX_COUNT; ++i)
7762 m_FreeSpaces[i] = s;
// Records a free range, evicting the smallest kept entry if it is
// smaller than the new one; tiny ranges are ignored.
7766 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
7768 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7774 size_t bestIndex = SIZE_MAX;
7775 for(
size_t i = 0; i < MAX_COUNT; ++i)
// Empty slot found (break is in a truncated line).
7778 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
7783 if(m_FreeSpaces[i].size < size &&
7784 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
7790 if(bestIndex != SIZE_MAX)
7792 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
7793 m_FreeSpaces[bestIndex].offset = offset;
7794 m_FreeSpaces[bestIndex].size = size;
// Finds a stored free range that can hold `size` at `alignment`,
// returning its block index and aligned destination offset; the slot is
// shrunk or invalidated depending on what remains after the fit.
7798 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
7799 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
7801 size_t bestIndex = SIZE_MAX;
7802 VkDeviceSize bestFreeSpaceAfter = 0;
7803 for(
size_t i = 0; i < MAX_COUNT; ++i)
7806 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
7808 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
7810 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
7812 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
7814 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
7817 bestFreeSpaceAfter = freeSpaceAfter;
7823 if(bestIndex != SIZE_MAX)
7825 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
7826 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
// Leftover still worth keeping: shrink the slot in place.
7828 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7831 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
7832 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
7833 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
// Otherwise drop the slot entirely.
7838 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
7848 static const size_t MAX_COUNT = 4;
// Slot: SIZE_MAX blockInfoIndex marks "empty".
7852 size_t blockInfoIndex;
7853 VkDeviceSize offset;
7855 } m_FreeSpaces[MAX_COUNT];
7858 const bool m_OverlappingMoveSupported;
7860 uint32_t m_AllocationCount;
7861 bool m_AllAllocations;
7863 VkDeviceSize m_BytesMoved;
7864 uint32_t m_AllocationsMoved;
7866 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
7868 void PreprocessMetadata();
7869 void PostprocessMetadata();
7870 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
// Per-block defragmentation state (struct body truncated in this view).
7873 struct VmaBlockDefragmentationContext
7877 BLOCK_FLAG_USED = 0x00000001,
// Defragmentation state for one VmaBlockVector: the chosen algorithm, the
// planned moves, and progress counters for incremental processing.
// NOTE(review): mangled extraction — lines missing; code left untouched.
7883 class VmaBlockVectorDefragmentationContext
7885 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
7889 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
7890 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
// Counters for incremental pass processing/committal of the move list.
7891 uint32_t defragmentationMovesProcessed;
7892 uint32_t defragmentationMovesCommitted;
7893 bool hasDefragmentationPlan;
7895 VmaBlockVectorDefragmentationContext(
7898 VmaBlockVector* pBlockVector,
7899 uint32_t currFrameIndex);
7900 ~VmaBlockVectorDefragmentationContext();
7902 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
7903 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
7904 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
7906 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7907 void AddAll() { m_AllAllocations =
true; }
7916 VmaBlockVector*
const m_pBlockVector;
7917 const uint32_t m_CurrFrameIndex;
// Owned algorithm instance; created lazily (constructor not visible here).
7919 VmaDefragmentationAlgorithm* m_pAlgorithm;
7927 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
7928 bool m_AllAllocations;
// Top-level defragmentation context spanning multiple pools/memory types:
// holds one VmaBlockVectorDefragmentationContext per default pool (indexed
// by memory type) plus a vector of contexts for custom pools.
// NOTE(review): mangled extraction — lines missing; code left untouched.
7931 struct VmaDefragmentationContext_T
7934 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
7936 VmaDefragmentationContext_T(
7938 uint32_t currFrameIndex,
7941 ~VmaDefragmentationContext_T();
7943 void AddPools(uint32_t poolCount,
const VmaPool* pPools);
7944 void AddAllocations(
7945 uint32_t allocationCount,
7947 VkBool32* pAllocationsChanged);
// Runs defragmentation within the given CPU/GPU byte and count budgets.
7955 VkResult Defragment(
7956 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
7957 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
7961 VkResult DefragmentPassEnd();
7965 const uint32_t m_CurrFrameIndex;
7966 const uint32_t m_Flags;
// Remaining budgets carried across incremental passes.
7969 VkDeviceSize m_MaxCpuBytesToMove;
7970 uint32_t m_MaxCpuAllocationsToMove;
7971 VkDeviceSize m_MaxGpuBytesToMove;
7972 uint32_t m_MaxGpuAllocationsToMove;
7975 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
7977 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
// Call-recording facility, compiled in only with VMA_RECORDING_ENABLED.
// The enclosing class's header line (presumably `class VmaRecorder`) is
// missing from this view; these are its method declarations: one Record*
// entry per public VMA API call, written to m_File with timestamps relative
// to m_RecordingStartTime.
7980 #if VMA_RECORDING_ENABLED
// Writes a header describing the device/extension configuration so a
// replay tool can reproduce the environment.
7987 void WriteConfiguration(
7988 const VkPhysicalDeviceProperties& devProps,
7989 const VkPhysicalDeviceMemoryProperties& memProps,
7990 uint32_t vulkanApiVersion,
7991 bool dedicatedAllocationExtensionEnabled,
7992 bool bindMemory2ExtensionEnabled,
7993 bool memoryBudgetExtensionEnabled,
7994 bool deviceCoherentMemoryExtensionEnabled);
// One recording hook per public API entry point (several declarations are
// truncated mid-parameter-list in this extraction).
7997 void RecordCreateAllocator(uint32_t frameIndex);
7998 void RecordDestroyAllocator(uint32_t frameIndex);
7999 void RecordCreatePool(uint32_t frameIndex,
8002 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
8003 void RecordAllocateMemory(uint32_t frameIndex,
8004 const VkMemoryRequirements& vkMemReq,
8007 void RecordAllocateMemoryPages(uint32_t frameIndex,
8008 const VkMemoryRequirements& vkMemReq,
8010 uint64_t allocationCount,
8012 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
8013 const VkMemoryRequirements& vkMemReq,
8014 bool requiresDedicatedAllocation,
8015 bool prefersDedicatedAllocation,
8018 void RecordAllocateMemoryForImage(uint32_t frameIndex,
8019 const VkMemoryRequirements& vkMemReq,
8020 bool requiresDedicatedAllocation,
8021 bool prefersDedicatedAllocation,
8024 void RecordFreeMemory(uint32_t frameIndex,
8026 void RecordFreeMemoryPages(uint32_t frameIndex,
8027 uint64_t allocationCount,
8029 void RecordSetAllocationUserData(uint32_t frameIndex,
8031 const void* pUserData);
8032 void RecordCreateLostAllocation(uint32_t frameIndex,
8034 void RecordMapMemory(uint32_t frameIndex,
8036 void RecordUnmapMemory(uint32_t frameIndex,
8038 void RecordFlushAllocation(uint32_t frameIndex,
8039 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
8040 void RecordInvalidateAllocation(uint32_t frameIndex,
8041 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
8042 void RecordCreateBuffer(uint32_t frameIndex,
8043 const VkBufferCreateInfo& bufCreateInfo,
8046 void RecordCreateImage(uint32_t frameIndex,
8047 const VkImageCreateInfo& imageCreateInfo,
8050 void RecordDestroyBuffer(uint32_t frameIndex,
8052 void RecordDestroyImage(uint32_t frameIndex,
8054 void RecordTouchAllocation(uint32_t frameIndex,
8056 void RecordGetAllocationInfo(uint32_t frameIndex,
8058 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
8060 void RecordDefragmentationBegin(uint32_t frameIndex,
8063 void RecordDefragmentationEnd(uint32_t frameIndex,
8065 void RecordSetPoolName(uint32_t frameIndex,
// Helper that stringifies user data for the log.
8076 class UserDataString
8080 const char* GetString()
const {
return m_Str; }
// File access is serialized; timestamps are relative to recording start.
8090 VMA_MUTEX m_FileMutex;
8091 std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;
8093 void GetBasicParams(CallParams& outParams);
// Prints a space-separated list of pointers; first element unseparated.
8096 template<
typename T>
8097 void PrintPointerList(uint64_t count,
const T* pItems)
8101 fprintf(m_File,
"%p", pItems[0]);
8102 for(uint64_t i = 1; i < count; ++i)
8104 fprintf(m_File,
" %p", pItems[i]);
8109 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
// Thin wrapper around a pool allocator dedicated to VmaAllocation_T
// objects; Allocate forwards constructor args via a variadic template.
// NOTE(review): class body incomplete in this extraction.
8118 class VmaAllocationObjectAllocator
8120 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
8122 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
8124 template<
typename... Types>
VmaAllocation Allocate(Types... args);
8129 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
// Per-heap budget accounting: atomic byte counters for blocks and
// allocations, plus (with VMA_MEMORY_BUDGET) cached usage/budget figures
// fetched from the driver and a counter to decide when to refresh them.
8132 struct VmaCurrentBudgetData
8134 VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
8135 VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
8137 #if VMA_MEMORY_BUDGET
// Operations since the last budget fetch; guarded data below uses
// m_BudgetMutex.
8138 VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
8139 VMA_RW_MUTEX m_BudgetMutex;
8140 uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
8141 uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
8142 uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
// Zero-initializes every per-heap counter.
8145 VmaCurrentBudgetData()
8147 for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
8149 m_BlockBytes[heapIndex] = 0;
8150 m_AllocationBytes[heapIndex] = 0;
8151 #if VMA_MEMORY_BUDGET
8152 m_VulkanUsage[heapIndex] = 0;
8153 m_VulkanBudget[heapIndex] = 0;
8154 m_BlockBytesAtBudgetFetch[heapIndex] = 0;
8158 #if VMA_MEMORY_BUDGET
8159 m_OperationsSinceBudgetFetch = 0;
// Bumps the heap's allocation byte count; also counts the operation
// toward the next budget refresh when the extension is enabled.
8163 void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
8165 m_AllocationBytes[heapIndex] += allocationSize;
8166 #if VMA_MEMORY_BUDGET
8167 ++m_OperationsSinceBudgetFetch;
// Inverse of AddAllocation; asserts against underflow first.
8171 void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
8173 VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
8174 m_AllocationBytes[heapIndex] -= allocationSize;
8175 #if VMA_MEMORY_BUDGET
8176 ++m_OperationsSinceBudgetFetch;
// The central allocator object behind the public VmaAllocator handle:
// feature flags, device/instance handles, per-memory-type block vectors,
// dedicated-allocation lists, budget data, pools, and the main
// allocate/free/bind/flush entry points.
// NOTE(review): mangled extraction — many lines missing, and this struct
// continues past the end of the visible chunk; code left untouched.
8182 struct VmaAllocator_T
8184 VMA_CLASS_NO_COPY(VmaAllocator_T)
// Extension/feature toggles resolved at creation time.
8187 uint32_t m_VulkanApiVersion;
8188 bool m_UseKhrDedicatedAllocation;
8189 bool m_UseKhrBindMemory2;
8190 bool m_UseExtMemoryBudget;
8191 bool m_UseAmdDeviceCoherentMemory;
8192 bool m_UseKhrBufferDeviceAddress;
8193 bool m_UseExtMemoryPriority;
8195 VkInstance m_hInstance;
8196 bool m_AllocationCallbacksSpecified;
8197 VkAllocationCallbacks m_AllocationCallbacks;
8199 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
// Bitmask of heaps that have a user-imposed size limit.
8202 uint32_t m_HeapSizeLimitMask;
8204 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
8205 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default pools: one block vector per memory type.
8208 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated (non-suballocated) allocations, per memory type, each list
// guarded by its own RW mutex.
8210 typedef VmaIntrusiveLinkedList<VmaDedicatedAllocationListItemTraits> DedicatedAllocationLinkedList;
8211 DedicatedAllocationLinkedList m_DedicatedAllocations[VK_MAX_MEMORY_TYPES];
8212 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
8214 VmaCurrentBudgetData m_Budget;
8215 VMA_ATOMIC_UINT32 m_DeviceMemoryCount;
// Returns user callbacks only if the user actually supplied them.
8221 const VkAllocationCallbacks* GetAllocationCallbacks()
const
8223 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
8227 return m_VulkanFunctions;
8230 VkPhysicalDevice GetPhysicalDevice()
const {
return m_PhysicalDevice; }
// Effective granularity = max(debug minimum, device limit); the VMA_MAX
// call around these lines is truncated in this view.
8232 VkDeviceSize GetBufferImageGranularity()
const
8235 static_cast<VkDeviceSize
>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
8236 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
8239 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
8240 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
8242 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const
8244 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
8245 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Non-coherent = host-visible without the host-coherent flag.
8248 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const
8250 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
8251 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Non-coherent types must respect nonCoherentAtomSize for flush ranges.
8254 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const
8256 return IsMemoryTypeNonCoherent(memTypeIndex) ?
8257 VMA_MAX((VkDeviceSize)VMA_MIN_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
8258 (VkDeviceSize)VMA_MIN_ALIGNMENT;
8261 bool IsIntegratedGpu()
const
8263 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
8266 uint32_t GetGlobalMemoryTypeBits()
const {
return m_GlobalMemoryTypeBits; }
8268 #if VMA_RECORDING_ENABLED
8269 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
// Query memory requirements and dedicated-allocation preference for a
// buffer/image (first parameter truncated in this view).
8272 void GetBufferMemoryRequirements(
8274 VkMemoryRequirements& memReq,
8275 bool& requiresDedicatedAllocation,
8276 bool& prefersDedicatedAllocation)
const;
8277 void GetImageMemoryRequirements(
8279 VkMemoryRequirements& memReq,
8280 bool& requiresDedicatedAllocation,
8281 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point (tail of parameter list truncated).
8284 VkResult AllocateMemory(
8285 const VkMemoryRequirements& vkMemReq,
8286 bool requiresDedicatedAllocation,
8287 bool prefersDedicatedAllocation,
8288 VkBuffer dedicatedBuffer,
8289 VkBufferUsageFlags dedicatedBufferUsage,
8290 VkImage dedicatedImage,
8292 VmaSuballocationType suballocType,
8293 size_t allocationCount,
8298 size_t allocationCount,
8301 void CalculateStats(
VmaStats* pStats);
8304 VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);
8306 #if VMA_STATS_STRING_ENABLED
8307 void PrintDetailedMap(
class VmaJsonWriter& json);
// Defragmentation lifecycle (parameter lists truncated).
8310 VkResult DefragmentationBegin(
8314 VkResult DefragmentationEnd(
8317 VkResult DefragmentationPassBegin(
8320 VkResult DefragmentationPassEnd(
8327 void DestroyPool(
VmaPool pool);
8330 void SetCurrentFrameIndex(uint32_t frameIndex);
8331 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
8333 void MakePoolAllocationsLost(
8335 size_t* pLostAllocationCount);
8336 VkResult CheckPoolCorruption(
VmaPool hPool);
8337 VkResult CheckCorruption(uint32_t memoryTypeBits);
// Raw vkAllocateMemory/vkFreeMemory wrappers (budget-aware, presumably —
// bodies not visible here).
8342 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
8344 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
// Low-level bind helpers (tails truncated).
8346 VkResult BindVulkanBuffer(
8347 VkDeviceMemory memory,
8348 VkDeviceSize memoryOffset,
8352 VkResult BindVulkanImage(
8353 VkDeviceMemory memory,
8354 VkDeviceSize memoryOffset,
8361 VkResult BindBufferMemory(
8363 VkDeviceSize allocationLocalOffset,
8366 VkResult BindImageMemory(
8368 VkDeviceSize allocationLocalOffset,
8372 VkResult FlushOrInvalidateAllocation(
8374 VkDeviceSize offset, VkDeviceSize size,
8375 VMA_CACHE_OPERATION op);
8376 VkResult FlushOrInvalidateAllocations(
8377 uint32_t allocationCount,
8379 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
8380 VMA_CACHE_OPERATION op);
// Fills allocation memory with a byte pattern (debug feature).
8382 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
8388 uint32_t GetGpuDefragmentationMemoryTypeBits();
8392 VkDeviceSize m_PreferredLargeHeapBlockSize;
8394 VkPhysicalDevice m_PhysicalDevice;
8395 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
8396 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
// Custom pools, guarded by m_PoolsMutex.
8398 VMA_RW_MUTEX m_PoolsMutex;
8399 typedef VmaIntrusiveLinkedList<VmaPoolListItemTraits> PoolList;
8402 uint32_t m_NextPoolId;
8407 uint32_t m_GlobalMemoryTypeBits;
8409 #if VMA_RECORDING_ENABLED
8410 VmaRecorder* m_pRecorder;
// Vulkan function-pointer import strategies (struct continues past the
// visible end of this chunk).
8415 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
8416 void ImportVulkanFunctions_Static();
8421 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
8422 void ImportVulkanFunctions_Dynamic();
8425 void ValidateVulkanFunctions();
8427 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
8429 VkResult AllocateMemoryOfType(
8431 VkDeviceSize alignment,
8432 bool dedicatedAllocation,
8433 VkBuffer dedicatedBuffer,
8434 VkBufferUsageFlags dedicatedBufferUsage,
8435 VkImage dedicatedImage,
8437 uint32_t memTypeIndex,
8438 VmaSuballocationType suballocType,
8439 size_t allocationCount,
8443 VkResult AllocateDedicatedMemoryPage(
8445 VmaSuballocationType suballocType,
8446 uint32_t memTypeIndex,
8447 const VkMemoryAllocateInfo& allocInfo,
8449 bool isUserDataString,
8454 VkResult AllocateDedicatedMemory(
8456 VmaSuballocationType suballocType,
8457 uint32_t memTypeIndex,
8460 bool isUserDataString,
8463 VkBuffer dedicatedBuffer,
8464 VkBufferUsageFlags dedicatedBufferUsage,
8465 VkImage dedicatedImage,
8466 size_t allocationCount,
8475 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
8477 uint32_t CalculateGlobalMemoryTypeBits()
const;
8479 bool GetFlushOrInvalidateRange(
8481 VkDeviceSize offset, VkDeviceSize size,
8482 VkMappedMemoryRange& outRange)
const;
8484 #if VMA_MEMORY_BUDGET
8485 void UpdateVulkanBudget();
8492 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
8494 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
8497 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
8499 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Allocates storage for a single object of type T, aligned for T, through
// the allocator's callbacks.
// NOTE(review): original lines 8503-8504 (the function signature and the
// opening brace) are missing from this extraction — confirm the declaration
// against the upstream file before editing.
8502 template<
typename T>
8505 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
8508 template<
typename T>
8509 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
8511 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroys one object allocated with vma_new and returns its memory to the
// allocator's callbacks.
// NOTE(review): original lines 8516-8519 are missing from this extraction;
// presumably they held the braces and the explicit destructor invocation —
// verify against the upstream file before relying on this text.
8514 template<
typename T>
8515 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
8520 VmaFree(hAllocator, ptr);
// Destroys an array of `count` objects allocated with VmaAllocateArray and
// frees the storage; the loop iterates indices in reverse order.
// NOTE(review): interior lines (8526-8528, 8530, 8532-8533) are absent from
// this extraction — the per-element destructor call inside the loop and any
// null-pointer guard are not visible here; verify against the upstream file.
8524 template<
typename T>
8525 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
8529 for(
size_t i = count; i--; )
8531 VmaFree(hAllocator, ptr);
8538 #if VMA_STATS_STRING_ENABLED
8540 class VmaStringBuilder
8543 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
8544 size_t GetLength()
const {
return m_Data.size(); }
8545 const char* GetData()
const {
return m_Data.data(); }
8547 void Add(
char ch) { m_Data.push_back(ch); }
8548 void Add(
const char* pStr);
8549 void AddNewLine() { Add(
'\n'); }
8550 void AddNumber(uint32_t num);
8551 void AddNumber(uint64_t num);
8552 void AddPointer(
const void* ptr);
8555 VmaVector< char, VmaStlAllocator<char> > m_Data;
8558 void VmaStringBuilder::Add(
const char* pStr)
8560 const size_t strLen = strlen(pStr);
8563 const size_t oldCount = m_Data.size();
8564 m_Data.resize(oldCount + strLen);
8565 memcpy(m_Data.data() + oldCount, pStr, strLen);
8569 void VmaStringBuilder::AddNumber(uint32_t num)
8576 *--p =
'0' + (num % 10);
8583 void VmaStringBuilder::AddNumber(uint64_t num)
8590 *--p =
'0' + (num % 10);
8597 void VmaStringBuilder::AddPointer(
const void* ptr)
8600 VmaPtrToStr(buf,
sizeof(buf), ptr);
8609 #if VMA_STATS_STRING_ENABLED
8613 VMA_CLASS_NO_COPY(VmaJsonWriter)
8615 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
8618 void BeginObject(
bool singleLine =
false);
8621 void BeginArray(
bool singleLine =
false);
8624 void WriteString(
const char* pStr);
8625 void BeginString(
const char* pStr = VMA_NULL);
8626 void ContinueString(
const char* pStr);
8627 void ContinueString(uint32_t n);
8628 void ContinueString(uint64_t n);
8629 void ContinueString_Pointer(
const void* ptr);
8630 void EndString(
const char* pStr = VMA_NULL);
8632 void WriteNumber(uint32_t n);
8633 void WriteNumber(uint64_t n);
8634 void WriteBool(
bool b);
8638 static const char*
const INDENT;
8640 enum COLLECTION_TYPE
8642 COLLECTION_TYPE_OBJECT,
8643 COLLECTION_TYPE_ARRAY,
8647 COLLECTION_TYPE type;
8648 uint32_t valueCount;
8649 bool singleLineMode;
8652 VmaStringBuilder& m_SB;
8653 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
8654 bool m_InsideString;
8656 void BeginValue(
bool isString);
8657 void WriteIndent(
bool oneLess =
false);
// Two-space indentation unit emitted once per nesting level by
// VmaJsonWriter::WriteIndent.
8660 const char*
const VmaJsonWriter::INDENT =
"  ";
8662 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
8664 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
8665 m_InsideString(false)
8669 VmaJsonWriter::~VmaJsonWriter()
8671 VMA_ASSERT(!m_InsideString);
8672 VMA_ASSERT(m_Stack.empty());
8675 void VmaJsonWriter::BeginObject(
bool singleLine)
8677 VMA_ASSERT(!m_InsideString);
8683 item.type = COLLECTION_TYPE_OBJECT;
8684 item.valueCount = 0;
8685 item.singleLineMode = singleLine;
8686 m_Stack.push_back(item);
8689 void VmaJsonWriter::EndObject()
8691 VMA_ASSERT(!m_InsideString);
8696 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
8700 void VmaJsonWriter::BeginArray(
bool singleLine)
8702 VMA_ASSERT(!m_InsideString);
8708 item.type = COLLECTION_TYPE_ARRAY;
8709 item.valueCount = 0;
8710 item.singleLineMode = singleLine;
8711 m_Stack.push_back(item);
8714 void VmaJsonWriter::EndArray()
8716 VMA_ASSERT(!m_InsideString);
8721 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
8725 void VmaJsonWriter::WriteString(
const char* pStr)
8731 void VmaJsonWriter::BeginString(
const char* pStr)
8733 VMA_ASSERT(!m_InsideString);
8737 m_InsideString =
true;
8738 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8740 ContinueString(pStr);
8744 void VmaJsonWriter::ContinueString(
const char* pStr)
8746 VMA_ASSERT(m_InsideString);
8748 const size_t strLen = strlen(pStr);
8749 for(
size_t i = 0; i < strLen; ++i)
8782 VMA_ASSERT(0 &&
"Character not currently supported.");
// Appends the decimal representation of n to the string currently being
// written; only valid between BeginString and EndString.
// NOTE(review): the body lines after the assert (original 8789, 8791-8792)
// are missing from this extraction.
8788 void VmaJsonWriter::ContinueString(uint32_t n)
8790 VMA_ASSERT(m_InsideString);
// 64-bit overload: appends the decimal representation of n to the string
// currently being written; only valid between BeginString and EndString.
// NOTE(review): the body lines after the assert (original 8795, 8797-8798)
// are missing from this extraction.
8794 void VmaJsonWriter::ContinueString(uint64_t n)
8796 VMA_ASSERT(m_InsideString);
8800 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
8802 VMA_ASSERT(m_InsideString);
8803 m_SB.AddPointer(ptr);
8806 void VmaJsonWriter::EndString(
const char* pStr)
8808 VMA_ASSERT(m_InsideString);
8809 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8811 ContinueString(pStr);
8814 m_InsideString =
false;
// Writes n as a standalone JSON number value (not inside a string).
// NOTE(review): original lines 8818, 8820-8822 (braces and the value-emit
// statements) are missing from this extraction.
8817 void VmaJsonWriter::WriteNumber(uint32_t n)
8819 VMA_ASSERT(!m_InsideString);
// 64-bit overload: writes n as a standalone JSON number value.
// NOTE(review): original lines 8825, 8827-8829 (braces and the value-emit
// statements) are missing from this extraction.
8824 void VmaJsonWriter::WriteNumber(uint64_t n)
8826 VMA_ASSERT(!m_InsideString);
8831 void VmaJsonWriter::WriteBool(
bool b)
8833 VMA_ASSERT(!m_InsideString);
8835 m_SB.Add(b ?
"true" :
"false");
8838 void VmaJsonWriter::WriteNull()
8840 VMA_ASSERT(!m_InsideString);
8845 void VmaJsonWriter::BeginValue(
bool isString)
8847 if(!m_Stack.empty())
8849 StackItem& currItem = m_Stack.back();
8850 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8851 currItem.valueCount % 2 == 0)
8853 VMA_ASSERT(isString);
8856 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8857 currItem.valueCount % 2 != 0)
8861 else if(currItem.valueCount > 0)
8870 ++currItem.valueCount;
8874 void VmaJsonWriter::WriteIndent(
bool oneLess)
8876 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
8880 size_t count = m_Stack.size();
8881 if(count > 0 && oneLess)
8885 for(
size_t i = 0; i < count; ++i)
8896 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
8898 if(IsUserDataString())
8900 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
8902 FreeUserDataString(hAllocator);
8904 if(pUserData != VMA_NULL)
8906 m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (
const char*)pUserData);
8911 m_pUserData = pUserData;
8915 void VmaAllocation_T::ChangeBlockAllocation(
8917 VmaDeviceMemoryBlock* block,
8918 VkDeviceSize offset)
8920 VMA_ASSERT(block != VMA_NULL);
8921 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8924 if(block != m_BlockAllocation.m_Block)
8926 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
8927 if(IsPersistentMap())
8929 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
8930 block->Map(hAllocator, mapRefCount, VMA_NULL);
8933 m_BlockAllocation.m_Block = block;
8934 m_BlockAllocation.m_Offset = offset;
8937 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
8939 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8940 m_BlockAllocation.m_Offset = newOffset;
8943 VkDeviceSize VmaAllocation_T::GetOffset()
const
8947 case ALLOCATION_TYPE_BLOCK:
8948 return m_BlockAllocation.m_Offset;
8949 case ALLOCATION_TYPE_DEDICATED:
8957 VkDeviceMemory VmaAllocation_T::GetMemory()
const
8961 case ALLOCATION_TYPE_BLOCK:
8962 return m_BlockAllocation.m_Block->GetDeviceMemory();
8963 case ALLOCATION_TYPE_DEDICATED:
8964 return m_DedicatedAllocation.m_hMemory;
8967 return VK_NULL_HANDLE;
8971 void* VmaAllocation_T::GetMappedData()
const
8975 case ALLOCATION_TYPE_BLOCK:
8978 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
8979 VMA_ASSERT(pBlockData != VMA_NULL);
8980 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
8987 case ALLOCATION_TYPE_DEDICATED:
8988 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
8989 return m_DedicatedAllocation.m_pMappedData;
8996 bool VmaAllocation_T::CanBecomeLost()
const
9000 case ALLOCATION_TYPE_BLOCK:
9001 return m_BlockAllocation.m_CanBecomeLost;
9002 case ALLOCATION_TYPE_DEDICATED:
9010 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9012 VMA_ASSERT(CanBecomeLost());
9018 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
9021 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
9026 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
9032 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
9042 #if VMA_STATS_STRING_ENABLED
9045 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
9054 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const
9056 json.WriteString(
"Type");
9057 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
9059 json.WriteString(
"Size");
9060 json.WriteNumber(m_Size);
9062 if(m_pUserData != VMA_NULL)
9064 json.WriteString(
"UserData");
9065 if(IsUserDataString())
9067 json.WriteString((
const char*)m_pUserData);
9072 json.ContinueString_Pointer(m_pUserData);
9077 json.WriteString(
"CreationFrameIndex");
9078 json.WriteNumber(m_CreationFrameIndex);
9080 json.WriteString(
"LastUseFrameIndex");
9081 json.WriteNumber(GetLastUseFrameIndex());
9083 if(m_BufferImageUsage != 0)
9085 json.WriteString(
"Usage");
9086 json.WriteNumber(m_BufferImageUsage);
9092 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
9094 VMA_ASSERT(IsUserDataString());
9095 VmaFreeString(hAllocator->GetAllocationCallbacks(), (
char*)m_pUserData);
9096 m_pUserData = VMA_NULL;
9099 void VmaAllocation_T::BlockAllocMap()
9101 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
9103 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
9109 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
9113 void VmaAllocation_T::BlockAllocUnmap()
9115 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
9117 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
9123 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
9127 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
9129 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
9133 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
9135 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
9136 *ppData = m_DedicatedAllocation.m_pMappedData;
9142 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
9143 return VK_ERROR_MEMORY_MAP_FAILED;
9148 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9149 hAllocator->m_hDevice,
9150 m_DedicatedAllocation.m_hMemory,
9155 if(result == VK_SUCCESS)
9157 m_DedicatedAllocation.m_pMappedData = *ppData;
9164 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
9166 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
9168 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
9173 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
9174 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
9175 hAllocator->m_hDevice,
9176 m_DedicatedAllocation.m_hMemory);
9181 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
9185 #if VMA_STATS_STRING_ENABLED
9187 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
9191 json.WriteString(
"Blocks");
9194 json.WriteString(
"Allocations");
9197 json.WriteString(
"UnusedRanges");
9200 json.WriteString(
"UsedBytes");
9203 json.WriteString(
"UnusedBytes");
9208 json.WriteString(
"AllocationSize");
9209 json.BeginObject(
true);
9210 json.WriteString(
"Min");
9212 json.WriteString(
"Avg");
9214 json.WriteString(
"Max");
9221 json.WriteString(
"UnusedRangeSize");
9222 json.BeginObject(
true);
9223 json.WriteString(
"Min");
9225 json.WriteString(
"Avg");
9227 json.WriteString(
"Max");
9237 struct VmaSuballocationItemSizeLess
9240 const VmaSuballocationList::iterator lhs,
9241 const VmaSuballocationList::iterator rhs)
const
9243 return lhs->size < rhs->size;
9246 const VmaSuballocationList::iterator lhs,
9247 VkDeviceSize rhsSize)
const
9249 return lhs->size < rhsSize;
9257 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
9259 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
9263 #if VMA_STATS_STRING_ENABLED
9265 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
9266 VkDeviceSize unusedBytes,
9267 size_t allocationCount,
9268 size_t unusedRangeCount)
const
9272 json.WriteString(
"TotalBytes");
9273 json.WriteNumber(GetSize());
9275 json.WriteString(
"UnusedBytes");
9276 json.WriteNumber(unusedBytes);
9278 json.WriteString(
"Allocations");
9279 json.WriteNumber((uint64_t)allocationCount);
9281 json.WriteString(
"UnusedRanges");
9282 json.WriteNumber((uint64_t)unusedRangeCount);
9284 json.WriteString(
"Suballocations");
9288 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
9289 VkDeviceSize offset,
9292 json.BeginObject(
true);
9294 json.WriteString(
"Offset");
9295 json.WriteNumber(offset);
9297 hAllocation->PrintParameters(json);
9302 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
9303 VkDeviceSize offset,
9304 VkDeviceSize size)
const
9306 json.BeginObject(
true);
9308 json.WriteString(
"Offset");
9309 json.WriteNumber(offset);
9311 json.WriteString(
"Type");
9312 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
9314 json.WriteString(
"Size");
9315 json.WriteNumber(size);
9320 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const
9331 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
9332 VmaBlockMetadata(hAllocator),
9335 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9336 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
9340 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
9344 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
9346 VmaBlockMetadata::Init(size);
9349 m_SumFreeSize = size;
9351 VmaSuballocation suballoc = {};
9352 suballoc.offset = 0;
9353 suballoc.size = size;
9354 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9355 suballoc.hAllocation = VK_NULL_HANDLE;
9357 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9358 m_Suballocations.push_back(suballoc);
9359 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
9361 m_FreeSuballocationsBySize.push_back(suballocItem);
9364 bool VmaBlockMetadata_Generic::Validate()
const
9366 VMA_VALIDATE(!m_Suballocations.empty());
9369 VkDeviceSize calculatedOffset = 0;
9371 uint32_t calculatedFreeCount = 0;
9373 VkDeviceSize calculatedSumFreeSize = 0;
9376 size_t freeSuballocationsToRegister = 0;
9378 bool prevFree =
false;
9380 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9381 suballocItem != m_Suballocations.cend();
9384 const VmaSuballocation& subAlloc = *suballocItem;
9387 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
9389 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
9391 VMA_VALIDATE(!prevFree || !currFree);
9393 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
9397 calculatedSumFreeSize += subAlloc.size;
9398 ++calculatedFreeCount;
9399 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9401 ++freeSuballocationsToRegister;
9405 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
9409 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
9410 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
9413 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
9416 calculatedOffset += subAlloc.size;
9417 prevFree = currFree;
9422 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
9424 VkDeviceSize lastSize = 0;
9425 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
9427 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
9430 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
9432 VMA_VALIDATE(suballocItem->size >= lastSize);
9434 lastSize = suballocItem->size;
9438 VMA_VALIDATE(ValidateFreeSuballocationList());
9439 VMA_VALIDATE(calculatedOffset == GetSize());
9440 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
9441 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
9446 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const
9448 if(!m_FreeSuballocationsBySize.empty())
9450 return m_FreeSuballocationsBySize.back()->size;
9458 bool VmaBlockMetadata_Generic::IsEmpty()
const
9460 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
9463 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
9467 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9479 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9480 suballocItem != m_Suballocations.cend();
9483 const VmaSuballocation& suballoc = *suballocItem;
9484 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9497 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const
9499 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9501 inoutStats.
size += GetSize();
9508 #if VMA_STATS_STRING_ENABLED
9510 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const
9512 PrintDetailedMap_Begin(json,
9514 m_Suballocations.size() - (
size_t)m_FreeCount,
9518 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9519 suballocItem != m_Suballocations.cend();
9520 ++suballocItem, ++i)
9522 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9524 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
9528 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
9532 PrintDetailedMap_End(json);
9537 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
9538 uint32_t currentFrameIndex,
9539 uint32_t frameInUseCount,
9540 VkDeviceSize bufferImageGranularity,
9541 VkDeviceSize allocSize,
9542 VkDeviceSize allocAlignment,
9544 VmaSuballocationType allocType,
9545 bool canMakeOtherLost,
9547 VmaAllocationRequest* pAllocationRequest)
9549 VMA_ASSERT(allocSize > 0);
9550 VMA_ASSERT(!upperAddress);
9551 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9552 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9553 VMA_HEAVY_ASSERT(Validate());
9555 pAllocationRequest->type = VmaAllocationRequestType::Normal;
9558 if(canMakeOtherLost ==
false &&
9559 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
9565 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
9566 if(freeSuballocCount > 0)
9571 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
9572 m_FreeSuballocationsBySize.data(),
9573 m_FreeSuballocationsBySize.data() + freeSuballocCount,
9574 allocSize + 2 * VMA_DEBUG_MARGIN,
9575 VmaSuballocationItemSizeLess());
9576 size_t index = it - m_FreeSuballocationsBySize.data();
9577 for(; index < freeSuballocCount; ++index)
9582 bufferImageGranularity,
9586 m_FreeSuballocationsBySize[index],
9588 &pAllocationRequest->offset,
9589 &pAllocationRequest->itemsToMakeLostCount,
9590 &pAllocationRequest->sumFreeSize,
9591 &pAllocationRequest->sumItemSize))
9593 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
9598 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
9600 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9601 it != m_Suballocations.end();
9604 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
9607 bufferImageGranularity,
9613 &pAllocationRequest->offset,
9614 &pAllocationRequest->itemsToMakeLostCount,
9615 &pAllocationRequest->sumFreeSize,
9616 &pAllocationRequest->sumItemSize))
9618 pAllocationRequest->item = it;
9626 for(
size_t index = freeSuballocCount; index--; )
9631 bufferImageGranularity,
9635 m_FreeSuballocationsBySize[index],
9637 &pAllocationRequest->offset,
9638 &pAllocationRequest->itemsToMakeLostCount,
9639 &pAllocationRequest->sumFreeSize,
9640 &pAllocationRequest->sumItemSize))
9642 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
9649 if(canMakeOtherLost)
9654 VmaAllocationRequest tmpAllocRequest = {};
9655 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
9656 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
9657 suballocIt != m_Suballocations.end();
9660 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
9661 suballocIt->hAllocation->CanBecomeLost())
9666 bufferImageGranularity,
9672 &tmpAllocRequest.offset,
9673 &tmpAllocRequest.itemsToMakeLostCount,
9674 &tmpAllocRequest.sumFreeSize,
9675 &tmpAllocRequest.sumItemSize))
9679 *pAllocationRequest = tmpAllocRequest;
9680 pAllocationRequest->item = suballocIt;
9683 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
9685 *pAllocationRequest = tmpAllocRequest;
9686 pAllocationRequest->item = suballocIt;
9699 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
9700 uint32_t currentFrameIndex,
9701 uint32_t frameInUseCount,
9702 VmaAllocationRequest* pAllocationRequest)
9704 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
9706 while(pAllocationRequest->itemsToMakeLostCount > 0)
9708 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
9710 ++pAllocationRequest->item;
9712 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9713 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
9714 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
9715 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9717 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
9718 --pAllocationRequest->itemsToMakeLostCount;
9726 VMA_HEAVY_ASSERT(Validate());
9727 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9728 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
9733 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9735 uint32_t lostAllocationCount = 0;
9736 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9737 it != m_Suballocations.end();
9740 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
9741 it->hAllocation->CanBecomeLost() &&
9742 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9744 it = FreeSuballocation(it);
9745 ++lostAllocationCount;
9748 return lostAllocationCount;
9751 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
9753 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9754 it != m_Suballocations.end();
9757 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
9759 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
9761 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9762 return VK_ERROR_VALIDATION_FAILED_EXT;
9764 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
9766 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9767 return VK_ERROR_VALIDATION_FAILED_EXT;
9775 void VmaBlockMetadata_Generic::Alloc(
9776 const VmaAllocationRequest& request,
9777 VmaSuballocationType type,
9778 VkDeviceSize allocSize,
9781 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
9782 VMA_ASSERT(request.item != m_Suballocations.end());
9783 VmaSuballocation& suballoc = *request.item;
9785 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9787 VMA_ASSERT(request.offset >= suballoc.offset);
9788 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
9789 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
9790 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
9794 UnregisterFreeSuballocation(request.item);
9796 suballoc.offset = request.offset;
9797 suballoc.size = allocSize;
9798 suballoc.type = type;
9799 suballoc.hAllocation = hAllocation;
9804 VmaSuballocation paddingSuballoc = {};
9805 paddingSuballoc.offset = request.offset + allocSize;
9806 paddingSuballoc.size = paddingEnd;
9807 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9808 VmaSuballocationList::iterator next = request.item;
9810 const VmaSuballocationList::iterator paddingEndItem =
9811 m_Suballocations.insert(next, paddingSuballoc);
9812 RegisterFreeSuballocation(paddingEndItem);
9818 VmaSuballocation paddingSuballoc = {};
9819 paddingSuballoc.offset = request.offset - paddingBegin;
9820 paddingSuballoc.size = paddingBegin;
9821 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9822 const VmaSuballocationList::iterator paddingBeginItem =
9823 m_Suballocations.insert(request.item, paddingSuballoc);
9824 RegisterFreeSuballocation(paddingBeginItem);
9828 m_FreeCount = m_FreeCount - 1;
9829 if(paddingBegin > 0)
9837 m_SumFreeSize -= allocSize;
// Frees the suballocation owning the given VmaAllocation handle.
// Linear scan over the suballocation list; on match, FreeSuballocation()
// converts the entry to FREE and merges with free neighbors. Asserts if the
// handle is not found in this block (caller error).
// NOTE(review): extraction-garbled — loop increment, braces, and the `return`
// after FreeSuballocation are among the lines missing from this view.
9840 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
9842 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9843 suballocItem != m_Suballocations.end();
9846 VmaSuballocation& suballoc = *suballocItem;
9847 if(suballoc.hAllocation == allocation)
9849 FreeSuballocation(suballocItem);
9850 VMA_HEAVY_ASSERT(Validate());
// Reached only when no suballocation carries this handle.
9854 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts exactly at `offset`.
// Same linear-scan pattern as Free(), keyed by offset instead of handle;
// asserts if no suballocation starts at that offset.
// NOTE(review): extraction-garbled — loop increment, braces, and the `return`
// after FreeSuballocation are missing from this view.
9857 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
9859 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9860 suballocItem != m_Suballocations.end();
9863 VmaSuballocation& suballoc = *suballocItem;
9864 if(suballoc.offset == offset)
9866 FreeSuballocation(suballocItem);
// Reached only when no suballocation starts at `offset`.
9870 VMA_ASSERT(0 &&
"Not found!");
// Debug validation of m_FreeSuballocationsBySize: every entry must be FREE,
// at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the vector
// must be sorted by size ascending (lastSize tracks the previous entry).
// Returns true via VMA_VALIDATE's short-circuit on success; the final
// `return true;` is among the lines missing from this garbled view.
9873 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const
9875 VkDeviceSize lastSize = 0;
9876 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
9878 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
9880 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
9881 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
// Ascending-size invariant required by the binary search in
// UnregisterFreeSuballocation.
9882 VMA_VALIDATE(it->size >= lastSize);
9883 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment/allocType can be
// placed at (or after) the suballocation pointed to by suballocItem.
// Two major branches:
//   1) canMakeOtherLost == true: the candidate region may span multiple
//      suballocations; used allocations inside it may be counted into
//      *itemsToMakeLostCount if they CanBecomeLost() and are stale by
//      frameInUseCount frames. *pSumFreeSize / *pSumItemSize accumulate the
//      free vs. to-be-lost bytes covered.
//   2) canMakeOtherLost == false: suballocItem must itself be FREE and large
//      enough after alignment and VMA_DEBUG_MARGIN padding.
// In both branches *pOffset receives the final aligned offset, adjusted
// upward for bufferImageGranularity when a neighbor on the same "page" has a
// conflicting suballocation type (Vulkan linear-vs-optimal resource rule).
// Returns true when placement is possible (the `return` statements are among
// the lines dropped from this garbled view, as are many braces/else lines).
9888 bool VmaBlockMetadata_Generic::CheckAllocation(
9889 uint32_t currentFrameIndex,
9890 uint32_t frameInUseCount,
9891 VkDeviceSize bufferImageGranularity,
9892 VkDeviceSize allocSize,
9893 VkDeviceSize allocAlignment,
9894 VmaSuballocationType allocType,
9895 VmaSuballocationList::const_iterator suballocItem,
9896 bool canMakeOtherLost,
9897 VkDeviceSize* pOffset,
9898 size_t* itemsToMakeLostCount,
9899 VkDeviceSize* pSumFreeSize,
9900 VkDeviceSize* pSumItemSize)
const
9902 VMA_ASSERT(allocSize > 0);
9903 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9904 VMA_ASSERT(suballocItem != m_Suballocations.cend());
9905 VMA_ASSERT(pOffset != VMA_NULL);
9907 *itemsToMakeLostCount = 0;
// ---- Branch 1: may make other allocations lost ----
9911 if(canMakeOtherLost)
9913 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9915 *pSumFreeSize = suballocItem->size;
// else: the starting item is occupied — it must itself be losable and stale.
9919 if(suballocItem->hAllocation->CanBecomeLost() &&
9920 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9922 ++*itemsToMakeLostCount;
9923 *pSumItemSize = suballocItem->size;
// Early reject: not enough room between this offset and end of block.
9932 if(GetSize() - suballocItem->offset < allocSize)
9938 *pOffset = suballocItem->offset;
// Leading debug margin to catch underruns.
9941 if(VMA_DEBUG_MARGIN > 0)
9943 *pOffset += VMA_DEBUG_MARGIN;
9947 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Respect bufferImageGranularity vs the previous neighbor: if a conflicting
// type shares the same granularity page, bump alignment up to the granularity.
9951 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
9953 bool bufferImageGranularityConflict =
false;
9954 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
9955 while(prevSuballocItem != m_Suballocations.cbegin())
// (the `--prevSuballocItem;` step is missing from this view — compare the
// intact copy of this loop at original line 10100 below)
9958 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
9959 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
9961 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9963 bufferImageGranularityConflict =
true;
9971 if(bufferImageGranularityConflict)
9973 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
9979 if(*pOffset >= suballocItem->offset + suballocItem->size)
9985 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
9988 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
9990 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
// Reject if the padded region would run past the end of the block.
9992 if(suballocItem->offset + totalSize > GetSize())
// Walk forward across following suballocations until totalSize is covered,
// accumulating free bytes and losable occupied bytes.
9999 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
10000 if(totalSize > suballocItem->size)
10002 VkDeviceSize remainingSize = totalSize - suballocItem->size;
10003 while(remainingSize > 0)
10005 ++lastSuballocItem;
10006 if(lastSuballocItem == m_Suballocations.cend())
10010 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
10012 *pSumFreeSize += lastSuballocItem->size;
10016 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
10017 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
10018 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10020 ++*itemsToMakeLostCount;
10021 *pSumItemSize += lastSuballocItem->size;
10028 remainingSize = (lastSuballocItem->size < remainingSize) ?
10029 remainingSize - lastSuballocItem->size : 0;
// Check granularity conflict against the NEXT neighbor as well; conflicting
// stale allocations there must also be made lost.
10035 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
10037 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
10038 ++nextSuballocItem;
10039 while(nextSuballocItem != m_Suballocations.cend())
10041 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
10042 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10044 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10046 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
10047 if(nextSuballoc.hAllocation->CanBecomeLost() &&
10048 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10050 ++*itemsToMakeLostCount;
10063 ++nextSuballocItem;
// ---- Branch 2: cannot make others lost — item must be FREE and sufficient ----
10069 const VmaSuballocation& suballoc = *suballocItem;
10070 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10072 *pSumFreeSize = suballoc.size;
// Quick size reject before doing alignment work.
10075 if(suballoc.size < allocSize)
10081 *pOffset = suballoc.offset;
10084 if(VMA_DEBUG_MARGIN > 0)
10086 *pOffset += VMA_DEBUG_MARGIN;
10090 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same previous-neighbor granularity check as in branch 1.
10094 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
10096 bool bufferImageGranularityConflict =
false;
10097 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
10098 while(prevSuballocItem != m_Suballocations.cbegin())
10100 --prevSuballocItem;
10101 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
10102 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
10104 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10106 bufferImageGranularityConflict =
true;
10114 if(bufferImageGranularityConflict)
10116 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
10121 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
10124 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fail if alignment padding plus margins no longer fit in this free range.
10127 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Next-neighbor granularity check: in this branch a conflict is a hard
// failure (nothing can be made lost).
10134 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
10136 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
10137 ++nextSuballocItem;
10138 while(nextSuballocItem != m_Suballocations.cend())
10140 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
10141 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10143 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10153 ++nextSuballocItem;
// Merges a FREE suballocation with its (also FREE) successor: the successor's
// size is folded into `item` and the successor node is erased. Caller must
// guarantee both items are FREE; m_FreeCount decrement is among the lines
// missing from this garbled view (as is the `++nextItem;` step).
10162 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
10164 VMA_ASSERT(item != m_Suballocations.end());
10165 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10167 VmaSuballocationList::iterator nextItem = item;
10169 VMA_ASSERT(nextItem != m_Suballocations.end());
10170 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
10172 item->size += nextItem->size;
10174 m_Suballocations.erase(nextItem);
// Converts an occupied suballocation to FREE and coalesces it with adjacent
// free neighbors. Returns an iterator to the resulting (possibly merged)
// free suballocation, already registered in m_FreeSuballocationsBySize.
// NOTE(review): garbled view — the `++m_FreeCount`, iterator steps
// (`++nextItem`, `--prevItem`), the `if(mergeWithNext)/if(mergeWithPrev)`
// headers, and the merged-prev `return prevItem;` are among the missing lines.
10177 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
10180 VmaSuballocation& suballoc = *suballocItem;
10181 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10182 suballoc.hAllocation = VK_NULL_HANDLE;
10186 m_SumFreeSize += suballoc.size;
// Decide whether the new free range touches a free neighbor on either side.
10189 bool mergeWithNext =
false;
10190 bool mergeWithPrev =
false;
10192 VmaSuballocationList::iterator nextItem = suballocItem;
10194 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
10196 mergeWithNext =
true;
10199 VmaSuballocationList::iterator prevItem = suballocItem;
10200 if(suballocItem != m_Suballocations.begin())
10203 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
10205 mergeWithPrev =
true;
// Neighbors must be unregistered from the size-sorted vector before their
// sizes change via MergeFreeWithNext.
10211 UnregisterFreeSuballocation(nextItem);
10212 MergeFreeWithNext(suballocItem);
10217 UnregisterFreeSuballocation(prevItem);
10218 MergeFreeWithNext(prevItem);
10219 RegisterFreeSuballocation(prevItem);
// No prev merge: register the item itself as the free range.
10224 RegisterFreeSuballocation(suballocItem);
10225 return suballocItem;
// Inserts a FREE suballocation into m_FreeSuballocationsBySize, keeping the
// vector sorted by size (VmaSuballocationItemSizeLess). Ranges smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately not indexed —
// they are too small to satisfy requests and would bloat the vector.
10229 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
10231 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10232 VMA_ASSERT(item->size > 0);
10236 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
10238 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
10240 if(m_FreeSuballocationsBySize.empty())
10242 m_FreeSuballocationsBySize.push_back(item);
// Non-empty: binary-search insertion preserves ascending-size order.
10246 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from m_FreeSuballocationsBySize.
// Binary-searches to the first entry of equal size, then scans forward over
// the run of equal-size entries to find the exact iterator. Asserts if the
// item is not present (would indicate corrupted bookkeeping). Items below
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER were never registered, so
// nothing to remove in that case.
10254 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
10256 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10257 VMA_ASSERT(item->size > 0);
10261 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
10263 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
10265 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
10266 m_FreeSuballocationsBySize.data(),
10267 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
// (the `item,` argument line is missing from this garbled view)
10269 VmaSuballocationItemSizeLess());
10270 for(
size_t index = it - m_FreeSuballocationsBySize.data();
10271 index < m_FreeSuballocationsBySize.size();
// (the `++index)` line is missing from this view)
10274 if(m_FreeSuballocationsBySize[index] == item)
10276 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run, otherwise the item cannot exist here.
10279 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
10281 VMA_ASSERT(0 &&
"Not found.");
// Heuristic used by defragmentation: reports whether moving allocations in
// this block could create bufferImageGranularity conflicts. Returns false
// early when granularity is 1 or the block is empty (no conflict possible —
// those `return false;` lines are missing from this garbled view). Otherwise
// scans all used suballocations, tracking the minimum alignment and whether
// any adjacent type pair conflicts; inOutPrevSuballocType carries the last
// seen type across blocks for the caller.
10287 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
10288 VkDeviceSize bufferImageGranularity,
10289 VmaSuballocationType& inOutPrevSuballocType)
const
10291 if(bufferImageGranularity == 1 || IsEmpty())
10296 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
10297 bool typeConflictFound =
false;
10298 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
10299 it != m_Suballocations.cend();
// (the `++it)` step is missing from this view)
10302 const VmaSuballocationType suballocType = it->type;
10303 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
10305 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
10306 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
10308 typeConflictFound =
true;
10310 inOutPrevSuballocType = suballocType;
// Conflict possible if types already conflict, or if all alignments are at
// least the granularity (so same-page sharing cannot be ruled out).
10314 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Constructor for the linear (ring-buffer / stack / double-stack) metadata
// algorithm. Two suballocation vectors are kept; m_1stVectorIndex selects
// which one is currently "1st". Null-item counters track lazily-removed
// (freed) entries so vectors need not be compacted on every free.
// NOTE(review): the `m_SumFreeSize(0),` initializer line appears to be
// dropped from this garbled view.
10320 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
10321 VmaBlockMetadata(hAllocator),
10323 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10324 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10325 m_1stVectorIndex(0),
10326 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
10327 m_1stNullItemsBeginCount(0),
10328 m_1stNullItemsMiddleCount(0),
10329 m_2ndNullItemsCount(0)
// Destructor: trivial — vectors release their memory via VmaStlAllocator.
10333 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes the metadata for a block of `size` bytes: records the size in
// the base class and starts with the entire block counted as free.
10337 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
10339 VmaBlockMetadata::Init(size);
10340 m_SumFreeSize = size;
// Full consistency check of the linear metadata (debug builds):
//  - 2nd vector emptiness must agree with m_2ndVectorMode;
//  - first real (non-null) and last items of each vector must hold live
//    allocations;
//  - null-item counters must not exceed vector sizes and must match the
//    actual number of null entries counted during the walk;
//  - offsets must be monotonically increasing with VMA_DEBUG_MARGIN gaps,
//    walking 2nd vector first in RING_BUFFER mode, then 1st, then 2nd
//    backwards in DOUBLE_STACK mode;
//  - m_SumFreeSize must equal block size minus the sum of used sizes.
// NOTE(review): garbled view — braces, `else` lines and the final
// `return true;` are among the missing lines.
10343 bool VmaBlockMetadata_Linear::Validate()
const
10345 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10346 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10348 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
10349 VMA_VALIDATE(!suballocations1st.empty() ||
10350 suballocations2nd.empty() ||
10351 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
10353 if(!suballocations1st.empty())
// First non-null entry and the last entry must be live allocations.
10356 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
10358 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
10360 if(!suballocations2nd.empty())
10363 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
10366 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
10367 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
10369 VkDeviceSize sumUsedSize = 0;
10370 const size_t suballoc1stCount = suballocations1st.size();
10371 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// RING_BUFFER: 2nd vector occupies the low end of the block — walk it first.
10373 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10375 const size_t suballoc2ndCount = suballocations2nd.size();
10376 size_t nullItem2ndCount = 0;
10377 for(
size_t i = 0; i < suballoc2ndCount; ++i)
10379 const VmaSuballocation& suballoc = suballocations2nd[i];
10380 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10382 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10383 VMA_VALIDATE(suballoc.offset >= offset);
// (the `if(!currFree)` header is missing from this view)
10387 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10388 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10389 sumUsedSize += suballoc.size;
10393 ++nullItem2ndCount;
10396 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10399 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must be genuinely free placeholders.
10402 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
10404 const VmaSuballocation& suballoc = suballocations1st[i];
10405 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
10406 suballoc.hAllocation == VK_NULL_HANDLE);
10409 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Walk the remainder of the 1st vector.
10411 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
10413 const VmaSuballocation& suballoc = suballocations1st[i];
10414 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10416 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10417 VMA_VALIDATE(suballoc.offset >= offset);
10418 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
10422 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10423 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10424 sumUsedSize += suballoc.size;
10428 ++nullItem1stCount;
10431 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10433 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// DOUBLE_STACK: 2nd vector grows downward from the high end — walk reversed.
10435 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10437 const size_t suballoc2ndCount = suballocations2nd.size();
10438 size_t nullItem2ndCount = 0;
10439 for(
size_t i = suballoc2ndCount; i--; )
10441 const VmaSuballocation& suballoc = suballocations2nd[i];
10442 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10444 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10445 VMA_VALIDATE(suballoc.offset >= offset);
10449 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10450 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10451 sumUsedSize += suballoc.size;
10455 ++nullItem2ndCount;
10458 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10461 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
10464 VMA_VALIDATE(offset <= GetSize());
10465 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Number of live allocations: total entries in both vectors minus the
// lazily-freed (null) items tracked by the three null-item counters.
10470 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const
10472 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
10473 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous free range, per 2nd-vector mode:
//  - EMPTY: max of the gap before the first allocation and after the last;
//  - RING_BUFFER: gap between the end of the 2nd vector (low addresses) and
//    the start of the 1st vector;
//  - DOUBLE_STACK: gap between the top of the 1st stack and the bottom of
//    the downward-growing 2nd stack.
// NOTE(review): garbled view — the early `return size;` for an empty block,
// the `return VMA_MAX(...)` wrapper around lines 10505-10506, and the
// closing `}`/default return are among the missing lines.
10476 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const
10478 const VkDeviceSize size = GetSize();
10490 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10492 switch(m_2ndVectorMode)
10494 case SECOND_VECTOR_EMPTY:
10500 const size_t suballocations1stCount = suballocations1st.size();
10501 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
10502 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10503 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
// Larger of: space before first allocation, space after last allocation.
10505 firstSuballoc.offset,
10506 size - (lastSuballoc.offset + lastSuballoc.size);
10510 case SECOND_VECTOR_RING_BUFFER:
10515 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10516 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
10517 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
10518 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
10522 case SECOND_VECTOR_DOUBLE_STACK:
10527 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10528 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
10529 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
10530 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills a VmaStatInfo for this block by walking the address space in order:
// in RING_BUFFER mode first the 2nd vector (low addresses up to the start of
// the 1st), then the 1st vector, then in DOUBLE_STACK mode the 2nd vector in
// reverse (high addresses). Each walk skips null (freed) entries, accounts
// every gap as an unused range and every live entry as an allocation.
// NOTE(review): garbled view — the outInfo field updates (allocationCount,
// usedBytes, unusedBytes, size min/max accumulation), several braces/else
// branches, and the loop-exit `break`/else paths are among the missing lines;
// only the offset-walk skeleton is visible here.
10540 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
10542 const VkDeviceSize size = GetSize();
10543 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10544 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10545 const size_t suballoc1stCount = suballocations1st.size();
10546 const size_t suballoc2ndCount = suballocations2nd.size();
10557 VkDeviceSize lastOffset = 0;
10559 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// 2nd vector ends where the 1st vector's first live entry begins.
10561 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10562 size_t nextAlloc2ndIndex = 0;
10563 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip over lazily-freed (null) entries.
10566 while(nextAlloc2ndIndex < suballoc2ndCount &&
10567 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10569 ++nextAlloc2ndIndex;
10573 if(nextAlloc2ndIndex < suballoc2ndCount)
10575 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10578 if(lastOffset < suballoc.offset)
10581 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10595 lastOffset = suballoc.offset + suballoc.size;
10596 ++nextAlloc2ndIndex;
// No more live 2nd-vector entries: the remainder up to the boundary is free.
10602 if(lastOffset < freeSpace2ndTo1stEnd)
10604 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10612 lastOffset = freeSpace2ndTo1stEnd;
// Walk the 1st vector up to block end, or up to the bottom of the 2nd stack.
10617 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10618 const VkDeviceSize freeSpace1stTo2ndEnd =
10619 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10620 while(lastOffset < freeSpace1stTo2ndEnd)
10623 while(nextAlloc1stIndex < suballoc1stCount &&
10624 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10626 ++nextAlloc1stIndex;
10630 if(nextAlloc1stIndex < suballoc1stCount)
10632 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10635 if(lastOffset < suballoc.offset)
10638 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10652 lastOffset = suballoc.offset + suballoc.size;
10653 ++nextAlloc1stIndex;
10659 if(lastOffset < freeSpace1stTo2ndEnd)
10661 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10669 lastOffset = freeSpace1stTo2ndEnd;
// DOUBLE_STACK: 2nd vector holds the highest addresses; iterate it backwards
// (back() has the lowest offset) to keep the walk in ascending address order.
10673 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10675 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10676 while(lastOffset < size)
10679 while(nextAlloc2ndIndex != SIZE_MAX &&
10680 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10682 --nextAlloc2ndIndex;
10686 if(nextAlloc2ndIndex != SIZE_MAX)
10688 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10691 if(lastOffset < suballoc.offset)
10694 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10708 lastOffset = suballoc.offset + suballoc.size;
10709 --nextAlloc2ndIndex;
10715 if(lastOffset < size)
10717 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's statistics into a VmaPoolStats, using the same
// three-phase address-order walk as CalcAllocationStatInfo (2nd vector in
// RING_BUFFER mode, then 1st vector, then reversed 2nd vector in
// DOUBLE_STACK mode), skipping null entries and accounting each gap as an
// unused range.
// NOTE(review): garbled view — the inoutStats updates (unusedSize,
// allocationCount, unusedRangeCount, unusedRangeSizeMax), braces and
// break/else lines are among the missing lines. Also flagged for review:
// nextAlloc2ndIndex at original line 10748 starts at m_1stNullItemsBeginCount,
// whereas the equivalent loop in CalcAllocationStatInfo starts at 0 —
// confirm against the upstream source whether this is intentional.
10733 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const
10735 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10736 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10737 const VkDeviceSize size = GetSize();
10738 const size_t suballoc1stCount = suballocations1st.size();
10739 const size_t suballoc2ndCount = suballocations2nd.size();
10741 inoutStats.
size += size;
10743 VkDeviceSize lastOffset = 0;
10745 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10747 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10748 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
10749 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) entries.
10752 while(nextAlloc2ndIndex < suballoc2ndCount &&
10753 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10755 ++nextAlloc2ndIndex;
10759 if(nextAlloc2ndIndex < suballoc2ndCount)
10761 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10764 if(lastOffset < suballoc.offset)
10767 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10778 lastOffset = suballoc.offset + suballoc.size;
10779 ++nextAlloc2ndIndex;
10784 if(lastOffset < freeSpace2ndTo1stEnd)
10787 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10794 lastOffset = freeSpace2ndTo1stEnd;
// Walk 1st vector up to block end or bottom of the 2nd (double) stack.
10799 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10800 const VkDeviceSize freeSpace1stTo2ndEnd =
10801 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10802 while(lastOffset < freeSpace1stTo2ndEnd)
10805 while(nextAlloc1stIndex < suballoc1stCount &&
10806 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10808 ++nextAlloc1stIndex;
10812 if(nextAlloc1stIndex < suballoc1stCount)
10814 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10817 if(lastOffset < suballoc.offset)
10820 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10831 lastOffset = suballoc.offset + suballoc.size;
10832 ++nextAlloc1stIndex;
10837 if(lastOffset < freeSpace1stTo2ndEnd)
10840 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10847 lastOffset = freeSpace1stTo2ndEnd;
// DOUBLE_STACK: reversed walk over the high-address 2nd vector.
10851 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10853 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10854 while(lastOffset < size)
10857 while(nextAlloc2ndIndex != SIZE_MAX &&
10858 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10860 --nextAlloc2ndIndex;
10864 if(nextAlloc2ndIndex != SIZE_MAX)
10866 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10869 if(lastOffset < suballoc.offset)
10872 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10883 lastOffset = suballoc.offset + suballoc.size;
10884 --nextAlloc2ndIndex;
10889 if(lastOffset < size)
10892 const VkDeviceSize unusedRangeSize = size - lastOffset;
10905 #if VMA_STATS_STRING_ENABLED
// Writes a detailed JSON map of this block. Two passes over the same
// three-phase address-order walk used by CalcAllocationStatInfo:
//   Pass 1 only counts allocations/unused ranges and sums used bytes (needed
//   up front by PrintDetailedMap_Begin);
//   Pass 2 repeats the walk and emits each allocation / unused range to the
//   JSON writer, then closes with PrintDetailedMap_End.
// NOTE(review): garbled view — braces, `else` branches, the re-declarations
// of nextAlloc1stIndex/freeSpace1stTo2ndEnd between the passes, and the
// alloc1stCount/alloc2ndCount increments are among the missing lines.
// Also flagged: pass 1's 1st-vector tail check at original line 11008 tests
// `lastOffset < size` but then sets `lastOffset = freeSpace1stTo2ndEnd` —
// confirm against the upstream source.
10906 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const
10908 const VkDeviceSize size = GetSize();
10909 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10910 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10911 const size_t suballoc1stCount = suballocations1st.size();
10912 const size_t suballoc2ndCount = suballocations2nd.size();
// ---- Pass 1: count allocations, unused ranges, and used bytes ----
10916 size_t unusedRangeCount = 0;
10917 VkDeviceSize usedBytes = 0;
10919 VkDeviceSize lastOffset = 0;
10921 size_t alloc2ndCount = 0;
10922 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10924 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10925 size_t nextAlloc2ndIndex = 0;
10926 while(lastOffset < freeSpace2ndTo1stEnd)
10929 while(nextAlloc2ndIndex < suballoc2ndCount &&
10930 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10932 ++nextAlloc2ndIndex;
10936 if(nextAlloc2ndIndex < suballoc2ndCount)
10938 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10941 if(lastOffset < suballoc.offset)
10944 ++unusedRangeCount;
10950 usedBytes += suballoc.size;
10953 lastOffset = suballoc.offset + suballoc.size;
10954 ++nextAlloc2ndIndex;
10959 if(lastOffset < freeSpace2ndTo1stEnd)
10962 ++unusedRangeCount;
10966 lastOffset = freeSpace2ndTo1stEnd;
10971 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10972 size_t alloc1stCount = 0;
10973 const VkDeviceSize freeSpace1stTo2ndEnd =
10974 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10975 while(lastOffset < freeSpace1stTo2ndEnd)
10978 while(nextAlloc1stIndex < suballoc1stCount &&
10979 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10981 ++nextAlloc1stIndex;
10985 if(nextAlloc1stIndex < suballoc1stCount)
10987 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10990 if(lastOffset < suballoc.offset)
10993 ++unusedRangeCount;
10999 usedBytes += suballoc.size;
11002 lastOffset = suballoc.offset + suballoc.size;
11003 ++nextAlloc1stIndex;
11008 if(lastOffset < size)
11011 ++unusedRangeCount;
11015 lastOffset = freeSpace1stTo2ndEnd;
11019 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11021 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
11022 while(lastOffset < size)
11025 while(nextAlloc2ndIndex != SIZE_MAX &&
11026 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11028 --nextAlloc2ndIndex;
11032 if(nextAlloc2ndIndex != SIZE_MAX)
11034 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11037 if(lastOffset < suballoc.offset)
11040 ++unusedRangeCount;
11046 usedBytes += suballoc.size;
11049 lastOffset = suballoc.offset + suballoc.size;
11050 --nextAlloc2ndIndex;
11055 if(lastOffset < size)
11058 ++unusedRangeCount;
// ---- Pass 2: emit JSON using the counts gathered above ----
11067 const VkDeviceSize unusedBytes = size - usedBytes;
11068 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
11073 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11075 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
11076 size_t nextAlloc2ndIndex = 0;
11077 while(lastOffset < freeSpace2ndTo1stEnd)
11080 while(nextAlloc2ndIndex < suballoc2ndCount &&
11081 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11083 ++nextAlloc2ndIndex;
11087 if(nextAlloc2ndIndex < suballoc2ndCount)
11089 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11092 if(lastOffset < suballoc.offset)
11095 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11096 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11101 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11104 lastOffset = suballoc.offset + suballoc.size;
11105 ++nextAlloc2ndIndex;
11110 if(lastOffset < freeSpace2ndTo1stEnd)
11113 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
11114 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11118 lastOffset = freeSpace2ndTo1stEnd;
11123 nextAlloc1stIndex = m_1stNullItemsBeginCount;
11124 while(lastOffset < freeSpace1stTo2ndEnd)
11127 while(nextAlloc1stIndex < suballoc1stCount &&
11128 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
11130 ++nextAlloc1stIndex;
11134 if(nextAlloc1stIndex < suballoc1stCount)
11136 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
11139 if(lastOffset < suballoc.offset)
11142 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11143 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11148 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11151 lastOffset = suballoc.offset + suballoc.size;
11152 ++nextAlloc1stIndex;
11157 if(lastOffset < freeSpace1stTo2ndEnd)
11160 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
11161 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11165 lastOffset = freeSpace1stTo2ndEnd;
11169 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11171 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
11172 while(lastOffset < size)
11175 while(nextAlloc2ndIndex != SIZE_MAX &&
11176 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11178 --nextAlloc2ndIndex;
11182 if(nextAlloc2ndIndex != SIZE_MAX)
11184 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11187 if(lastOffset < suballoc.offset)
11190 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11191 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11196 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11199 lastOffset = suballoc.offset + suballoc.size;
11200 --nextAlloc2ndIndex;
11205 if(lastOffset < size)
11208 const VkDeviceSize unusedRangeSize = size - lastOffset;
11209 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11218 PrintDetailedMap_End(json);
// Entry point for allocation requests in the linear algorithm: validates
// inputs, then dispatches to the upper-address (double-stack top) or
// lower-address (append/ring) path based on `upperAddress`.
// NOTE(review): garbled view — the `bool upperAddress,` and
// `uint32_t strategy,` parameter lines are missing here, though both names
// are used in the body.
11222 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
11223 uint32_t currentFrameIndex,
11224 uint32_t frameInUseCount,
11225 VkDeviceSize bufferImageGranularity,
11226 VkDeviceSize allocSize,
11227 VkDeviceSize allocAlignment,
11229 VmaSuballocationType allocType,
11230 bool canMakeOtherLost,
11232 VmaAllocationRequest* pAllocationRequest)
11234 VMA_ASSERT(allocSize > 0);
11235 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
11236 VMA_ASSERT(pAllocationRequest != VMA_NULL);
11237 VMA_HEAVY_ASSERT(Validate());
11238 return upperAddress ?
11239 CreateAllocationRequest_UpperAddress(
11240 currentFrameIndex, frameInUseCount, bufferImageGranularity,
11241 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
11242 CreateAllocationRequest_LowerAddress(
11243 currentFrameIndex, frameInUseCount, bufferImageGranularity,
11244 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the top of the block (double-stack upper
// side): the candidate offset starts at block end (or below the lowest
// existing 2nd-vector entry) minus allocSize, then is adjusted DOWN for
// VMA_DEBUG_MARGIN, alignment, and bufferImageGranularity conflicts with
// 2nd-vector neighbors below-adjacent in address. The placement succeeds
// only if it still clears the end of the 1st vector by VMA_DEBUG_MARGIN and
// no granularity conflict with a 1st-vector neighbor remains.
// Incompatible with RING_BUFFER mode (asserts).
// NOTE(review): garbled view — the `uint32_t strategy,` parameter, various
// `return false;` lines on the failure paths, braces, and the final
// `return true;` after filling *pAllocationRequest are among the missing
// lines.
11247 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
11248 uint32_t currentFrameIndex,
11249 uint32_t frameInUseCount,
11250 VkDeviceSize bufferImageGranularity,
11251 VkDeviceSize allocSize,
11252 VkDeviceSize allocAlignment,
11253 VmaSuballocationType allocType,
11254 bool canMakeOtherLost,
11256 VmaAllocationRequest* pAllocationRequest)
11258 const VkDeviceSize size = GetSize();
11259 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11260 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address allocation is a double-stack concept; a ring buffer has no
// stable "top" to allocate from.
11262 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11264 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
11269 if(allocSize > size)
11273 VkDeviceSize resultBaseOffset = size - allocSize;
11274 if(!suballocations2nd.empty())
11276 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11277 resultBaseOffset = lastSuballoc.offset - allocSize;
// Would underflow below offset 0 — cannot fit under the existing stack top.
11278 if(allocSize > lastSuballoc.offset)
11285 VkDeviceSize resultOffset = resultBaseOffset;
// Leave a debug margin below; growing downward means SUBTRACTING the margin.
11288 if(VMA_DEBUG_MARGIN > 0)
11290 if(resultOffset < VMA_DEBUG_MARGIN)
11294 resultOffset -= VMA_DEBUG_MARGIN;
// Downward growth aligns DOWN, not up.
11298 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Granularity check against 2nd-vector entries above (next in address order).
11302 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
11304 bool bufferImageGranularityConflict =
false;
11305 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11307 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11308 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11310 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
11312 bufferImageGranularityConflict =
true;
11320 if(bufferImageGranularityConflict)
11322 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Must not collide with the upward-growing 1st vector (plus margin).
11327 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
11328 suballocations1st.back().offset + suballocations1st.back().size :
11330 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Check granularity conflicts with 1st-vector entries on the same page;
// here a conflict is a hard failure (the `return false;` is missing).
11334 if(bufferImageGranularity > 1)
11336 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11338 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11339 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11341 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request. sumFreeSize is the free gap consumed.
11355 pAllocationRequest->offset = resultOffset;
11356 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
11357 pAllocationRequest->sumItemSize = 0;
11359 pAllocationRequest->itemsToMakeLostCount = 0;
11360 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to find space for a new allocation at the lower-address side of this
// linear block. Two layouts are handled:
//  1) 2nd vector EMPTY or DOUBLE_STACK: append at the end of the 1st vector,
//     growing toward the end of the block (or toward the bottom of the upper
//     stack when used as a double stack).
//  2) 2nd vector EMPTY or RING_BUFFER: wrap around and append at the end of
//     the 2nd vector, before the first used item of the 1st vector, possibly
//     marking existing allocations as candidates to be made lost.
// On success fills *pAllocationRequest and returns true.
// NOTE(review): blank/brace-only and some statement lines (returns, braces)
// are missing from this extraction; remaining tokens are kept verbatim.
11367 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
11368 uint32_t currentFrameIndex,
11369 uint32_t frameInUseCount,
11370 VkDeviceSize bufferImageGranularity,
11371 VkDeviceSize allocSize,
11372 VkDeviceSize allocAlignment,
11373 VmaSuballocationType allocType,
11374 bool canMakeOtherLost,
11376 VmaAllocationRequest* pAllocationRequest)
11378 const VkDeviceSize size = GetSize();
11379 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11380 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Case 1: try to allocate at the end of the 1st vector.
11382 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
// Base offset is just past the last suballocation of the 1st vector.
11386 VkDeviceSize resultBaseOffset = 0;
11387 if(!suballocations1st.empty())
11389 const VmaSuballocation& lastSuballoc = suballocations1st.back();
11390 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11394 VkDeviceSize resultOffset = resultBaseOffset;
// Leave a debug margin before the new allocation, if enabled.
11397 if(VMA_DEBUG_MARGIN > 0)
11399 resultOffset += VMA_DEBUG_MARGIN;
// Round the offset up to the requested alignment.
11403 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Check bufferImageGranularity conflicts with previous suballocations and,
// if any linear-vs-nonlinear conflict exists on the same "page", bump the
// alignment up to bufferImageGranularity.
11407 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty())
11409 bool bufferImageGranularityConflict =
false;
11410 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11412 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11413 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11415 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11417 bufferImageGranularityConflict =
true;
11425 if(bufferImageGranularityConflict)
11427 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the upper stack (double-stack mode) or
// at the end of the whole block.
11431 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
11432 suballocations2nd.back().offset : size;
// Does the request fit (including trailing debug margin)?
11435 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Also check granularity conflicts against allocations of the upper stack.
11439 if((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11441 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11443 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11444 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11446 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: fill the request for an end-of-1st allocation.
11460 pAllocationRequest->offset = resultOffset;
11461 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
11462 pAllocationRequest->sumItemSize = 0;
11464 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
11465 pAllocationRequest->itemsToMakeLostCount = 0;
// Case 2: wrap around - allocate at the end of the 2nd vector (ring buffer).
11472 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// Wrapping only makes sense when the 1st vector is non-empty.
11474 VMA_ASSERT(!suballocations1st.empty());
11476 VkDeviceSize resultBaseOffset = 0;
11477 if(!suballocations2nd.empty())
11479 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11480 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11484 VkDeviceSize resultOffset = resultBaseOffset;
11487 if(VMA_DEBUG_MARGIN > 0)
11489 resultOffset += VMA_DEBUG_MARGIN;
11493 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflict check against previous suballocations in the 2nd vector.
11497 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
11499 bool bufferImageGranularityConflict =
false;
11500 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
11502 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
11503 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11505 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11507 bufferImageGranularityConflict =
true;
11515 if(bufferImageGranularityConflict)
11517 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
11521 pAllocationRequest->itemsToMakeLostCount = 0;
11522 pAllocationRequest->sumItemSize = 0;
// index1st walks the used portion of the 1st vector, which the wrapped
// allocation may overlap.
11523 size_t index1st = m_1stNullItemsBeginCount;
// Count allocations from the 1st vector that would have to be made lost
// for the new allocation to fit before suballocations1st[index1st].
11525 if(canMakeOtherLost)
11527 while(index1st < suballocations1st.size() &&
11528 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
11531 const VmaSuballocation& suballoc = suballocations1st[index1st];
11532 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
11538 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
// Only allocations old enough (outside the frame-in-use window) qualify.
11539 if(suballoc.hAllocation->CanBecomeLost() &&
11540 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11542 ++pAllocationRequest->itemsToMakeLostCount;
11543 pAllocationRequest->sumItemSize += suballoc.size;
// Extra pass: granularity conflicts may force making additional items lost.
11555 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
11557 while(index1st < suballocations1st.size())
11559 const VmaSuballocation& suballoc = suballocations1st[index1st];
11560 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
11562 if(suballoc.hAllocation != VK_NULL_HANDLE)
11565 if(suballoc.hAllocation->CanBecomeLost() &&
11566 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11568 ++pAllocationRequest->itemsToMakeLostCount;
11569 pAllocationRequest->sumItemSize += suballoc.size;
// Unsupported special case: request would run past the end of the block
// even after consuming the whole 1st vector.
11587 if(index1st == suballocations1st.size() &&
11588 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
11591 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// There is enough free space now (possibly after making items lost above).
11596 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
11597 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
// Final granularity conflict check against remaining 1st-vector items.
11601 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
11603 for(
size_t nextSuballocIndex = index1st;
11604 nextSuballocIndex < suballocations1st.size();
11605 nextSuballocIndex++)
11607 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
11608 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11610 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: fill the request for an end-of-2nd (wrapped) allocation.
11624 pAllocationRequest->offset = resultOffset;
11625 pAllocationRequest->sumFreeSize =
11626 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
11628 - pAllocationRequest->sumItemSize;
11629 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Makes lost exactly the allocations counted by a previous
// CreateAllocationRequest_* pass (pAllocationRequest->itemsToMakeLostCount),
// walking the 1st vector first and continuing into the 2nd (ring-buffer)
// vector if needed. Frees their space, updates null-item counters, then
// compacts via CleanupAfterFree(). Returns true on success.
11638 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
11639 uint32_t currentFrameIndex,
11640 uint32_t frameInUseCount,
11641 VmaAllocationRequest* pAllocationRequest)
// Nothing to do: trivially succeed.
11643 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Making allocations lost is only supported in ring-buffer/empty mode,
// never in double-stack mode.
11648 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
11651 SuballocationVectorType* suballocations = &AccessSuballocations1st();
11652 size_t index = m_1stNullItemsBeginCount;
11653 size_t madeLostCount = 0;
11654 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// Ran off the end of the 1st vector: switch to the 2nd in ring-buffer mode.
11656 if(index == suballocations->size())
11660 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11662 suballocations = &AccessSuballocations2nd();
11666 VMA_ASSERT(!suballocations->empty());
11668 VmaSuballocation& suballoc = (*suballocations)[index];
11669 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11671 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11672 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
11673 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the item into a free hole and credit its size.
11675 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11676 suballoc.hAllocation = VK_NULL_HANDLE;
11677 m_SumFreeSize += suballoc.size;
// Track which vector the new null item belongs to.
11678 if(suballocations == &AccessSuballocations1st())
11680 ++m_1stNullItemsMiddleCount;
11684 ++m_2ndNullItemsCount;
// Compact vectors / drop trailing null items after freeing.
11696 CleanupAfterFree();
// Makes lost every allocation (in both suballocation vectors) that can become
// lost and whose last-use frame is outside the frame-in-use window. Returns
// the number of allocations made lost; compacts the vectors if any were.
11702 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
11704 uint32_t lostAllocationCount = 0;
// Pass 1: used portion of the 1st vector (skip leading null items).
11706 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11707 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11709 VmaSuballocation& suballoc = suballocations1st[i];
11710 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11711 suballoc.hAllocation->CanBecomeLost() &&
11712 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11714 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11715 suballoc.hAllocation = VK_NULL_HANDLE;
11716 ++m_1stNullItemsMiddleCount;
11717 m_SumFreeSize += suballoc.size;
11718 ++lostAllocationCount;
// Pass 2: entire 2nd vector.
11722 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11723 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11725 VmaSuballocation& suballoc = suballocations2nd[i];
11726 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11727 suballoc.hAllocation->CanBecomeLost() &&
11728 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11730 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11731 suballoc.hAllocation = VK_NULL_HANDLE;
11732 ++m_2ndNullItemsCount;
11733 m_SumFreeSize += suballoc.size;
11734 ++lostAllocationCount;
// Only compact when something actually changed.
11738 if(lostAllocationCount)
11740 CleanupAfterFree();
11743 return lostAllocationCount;
// Verifies the magic-value guard bytes written in the VMA_DEBUG_MARGIN region
// before and after every used suballocation, in both vectors. pBlockData is
// the mapped base pointer of the whole block. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin found.
11746 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
11748 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11749 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11751 const VmaSuballocation& suballoc = suballocations1st[i];
11752 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard region immediately before the allocation.
11754 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11756 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11757 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard region immediately after the allocation.
11759 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11761 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11762 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd vector.
11767 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11768 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11770 const VmaSuballocation& suballoc = suballocations2nd[i];
11771 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11773 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11775 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11776 return VK_ERROR_VALIDATION_FAILED_EXT;
11778 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11780 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11781 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits an allocation previously planned by CreateAllocationRequest_*.
// Appends the new suballocation to the vector indicated by request.type
// (upper stack, end of 1st, or end of 2nd/ring buffer), transitions the 2nd
// vector mode when necessary, and debits m_SumFreeSize.
11789 void VmaBlockMetadata_Linear::Alloc(
11790 const VmaAllocationRequest& request,
11791 VmaSuballocationType type,
11792 VkDeviceSize allocSize,
11795 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
11797 switch(request.type)
11799 case VmaAllocationRequestType::UpperAddress:
11801 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
11802 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
11803 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11804 suballocations2nd.push_back(newSuballoc);
// First upper-address allocation switches the block into double-stack mode.
11805 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
11808 case VmaAllocationRequestType::EndOf1st:
11810 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// New allocation must come after the current end of the 1st vector and
// stay within the block.
11812 VMA_ASSERT(suballocations1st.empty() ||
11813 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
11815 VMA_ASSERT(request.offset + allocSize <= GetSize());
11817 suballocations1st.push_back(newSuballoc);
11820 case VmaAllocationRequestType::EndOf2nd:
11822 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// Wrapped allocation must end before the first used item of the 1st vector.
11824 VMA_ASSERT(!suballocations1st.empty() &&
11825 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
11826 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11828 switch(m_2ndVectorMode)
11830 case SECOND_VECTOR_EMPTY:
// First wrapped allocation switches the block into ring-buffer mode.
11832 VMA_ASSERT(suballocations2nd.empty());
11833 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
11835 case SECOND_VECTOR_RING_BUFFER:
11837 VMA_ASSERT(!suballocations2nd.empty());
11839 case SECOND_VECTOR_DOUBLE_STACK:
11840 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
11846 suballocations2nd.push_back(newSuballoc);
11850 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
// Account the consumed space.
11853 m_SumFreeSize -= newSuballoc.size;
11856 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
11858 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation starting at the given offset. Fast paths: first
// used item of the 1st vector, last item of the 2nd vector, last item of the
// 1st vector. Otherwise binary-searches the interior of either vector and
// turns the item into a null (free) hole; always compacts afterwards.
11861 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
11863 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11864 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11866 if(!suballocations1st.empty())
// Fast path: freeing the first used item of the 1st vector just extends
// the leading run of null items.
11869 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
11870 if(firstSuballoc.offset == offset)
11872 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11873 firstSuballoc.hAllocation = VK_NULL_HANDLE;
11874 m_SumFreeSize += firstSuballoc.size;
11875 ++m_1stNullItemsBeginCount;
11876 CleanupAfterFree();
// Fast path: freeing the top of the upper stack / newest ring-buffer item.
11882 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
11883 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11885 VmaSuballocation& lastSuballoc = suballocations2nd.back();
11886 if(lastSuballoc.offset == offset)
11888 m_SumFreeSize += lastSuballoc.size;
11889 suballocations2nd.pop_back();
11890 CleanupAfterFree();
// Fast path: freeing the last item of the 1st vector (no 2nd vector in use).
11895 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
11897 VmaSuballocation& lastSuballoc = suballocations1st.back();
11898 if(lastSuballoc.offset == offset)
11900 m_SumFreeSize += lastSuballoc.size;
11901 suballocations1st.pop_back();
11902 CleanupAfterFree();
// Slow path: binary search in the used portion of the 1st vector
// (sorted ascending by offset).
11909 VmaSuballocation refSuballoc;
11910 refSuballoc.offset = offset;
11912 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
11913 suballocations1st.begin() + m_1stNullItemsBeginCount,
11914 suballocations1st.end(),
11916 VmaSuballocationOffsetLess());
11917 if(it != suballocations1st.end())
11919 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11920 it->hAllocation = VK_NULL_HANDLE;
11921 ++m_1stNullItemsMiddleCount;
11922 m_SumFreeSize += it->size;
11923 CleanupAfterFree();
// Slow path: binary search in the 2nd vector. Ring buffer is sorted
// ascending by offset; double stack is sorted descending.
11928 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
11931 VmaSuballocation refSuballoc;
11932 refSuballoc.offset = offset;
11934 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
11935 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
11936 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
11937 if(it != suballocations2nd.end())
11939 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11940 it->hAllocation = VK_NULL_HANDLE;
11941 ++m_2ndNullItemsCount;
11942 m_SumFreeSize += it->size;
11943 CleanupAfterFree();
// Reaching here means the offset matched no suballocation - a caller bug.
11948 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
11951 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const
11953 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11954 const size_t suballocCount = AccessSuballocations1st().size();
11955 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping performed after any free: drops trailing/leading null items,
// optionally compacts the 1st vector (see ShouldCompact1st), and when the 1st
// vector becomes fully empty, swaps in the 2nd vector as the new 1st
// (m_1stVectorIndex ^= 1). Resets everything when the block is empty.
11958 void VmaBlockMetadata_Linear::CleanupAfterFree()
11960 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11961 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Whole block became empty: reset to pristine state.
11965 suballocations1st.clear();
11966 suballocations2nd.clear();
11967 m_1stNullItemsBeginCount = 0;
11968 m_1stNullItemsMiddleCount = 0;
11969 m_2ndNullItemsCount = 0;
11970 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11974 const size_t suballoc1stCount = suballocations1st.size();
11975 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11976 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the leading null run: middle null items adjacent to the beginning
// become "begin" null items.
11979 while(m_1stNullItemsBeginCount < suballoc1stCount &&
11980 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
11982 ++m_1stNullItemsBeginCount;
11983 --m_1stNullItemsMiddleCount;
// Drop null items from the end of the 1st vector.
11987 while(m_1stNullItemsMiddleCount > 0 &&
11988 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
11990 --m_1stNullItemsMiddleCount;
11991 suballocations1st.pop_back();
// Drop null items from the end of the 2nd vector.
11995 while(m_2ndNullItemsCount > 0 &&
11996 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
11998 --m_2ndNullItemsCount;
11999 suballocations2nd.pop_back();
// Drop null items from the beginning of the 2nd vector (O(n) removal).
12003 while(m_2ndNullItemsCount > 0 &&
12004 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
12006 --m_2ndNullItemsCount;
12007 VmaVectorRemove(suballocations2nd, 0);
// Compact the 1st vector: shift all non-null items to the front.
12010 if(ShouldCompact1st())
12012 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
12013 size_t srcIndex = m_1stNullItemsBeginCount;
12014 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
12016 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
12020 if(dstIndex != srcIndex)
12022 suballocations1st[dstIndex] = suballocations1st[srcIndex];
12026 suballocations1st.resize(nonNullItemCount);
12027 m_1stNullItemsBeginCount = 0;
12028 m_1stNullItemsMiddleCount = 0;
// 2nd vector emptied out: leave double-stack / ring-buffer mode.
12032 if(suballocations2nd.empty())
12034 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no used items left: clear it, and if the ring buffer has
// content, promote the 2nd vector to become the new 1st.
12038 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
12040 suballocations1st.clear();
12041 m_1stNullItemsBeginCount = 0;
12043 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
12046 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
12047 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
12048 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
12049 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
12051 ++m_1stNullItemsBeginCount;
12052 --m_1stNullItemsMiddleCount;
12054 m_2ndNullItemsCount = 0;
// Swap the roles of the two vectors.
12055 m_1stVectorIndex ^= 1;
12060 VMA_HEAVY_ASSERT(Validate());
// Constructor: zero-initializes counters and the per-level free lists.
// NOTE(review): the initializer list appears truncated by extraction
// (members such as the root node pointer are presumably initialized here
// in the original file - verify against upstream).
12067 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
12068 VmaBlockMetadata(hAllocator),
12070 m_AllocationCount(0),
// Empty free list for every level.
12074 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively frees the whole buddy tree starting at the root.
12077 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
12079 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of the given size. Only the
// largest power-of-two portion of the block is usable; the remainder is
// reported as unusable. Builds the level count and a single free root node.
12082 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
12084 VmaBlockMetadata::Init(size);
// Usable size is rounded down to a power of two.
12086 m_UsableSize = VmaPrevPow2(size);
12087 m_SumFreeSize = m_UsableSize;
// Determine how many levels fit between the usable size and MIN_NODE_SIZE.
12091 while(m_LevelCount < MAX_LEVELS &&
12092 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// The root node spans the whole usable size and starts free.
12097 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
12098 rootNode->offset = 0;
12099 rootNode->type = Node::TYPE_FREE;
12100 rootNode->parent = VMA_NULL;
12101 rootNode->buddy = VMA_NULL;
12104 AddToFreeListFront(0, rootNode);
// Consistency check: validates the whole node tree against recomputed
// statistics, then validates every level's free list (doubly-linked,
// all nodes free, back pointer correct) and that unused levels are empty.
12107 bool VmaBlockMetadata_Buddy::Validate()
const
12110 ValidationContext ctx;
12111 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
12113 VMA_VALIDATE(
false &&
"ValidateNode failed.");
// Cached counters must agree with what the tree walk recomputed.
12115 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
12116 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Validate free-list structure per level.
12119 for(uint32_t level = 0; level < m_LevelCount; ++level)
12121 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
12122 m_FreeList[level].front->free.prev == VMA_NULL);
12124 for(Node* node = m_FreeList[level].front;
12126 node = node->free.next)
12128 VMA_VALIDATE(node->type == Node::TYPE_FREE);
12130 if(node->free.next == VMA_NULL)
12132 VMA_VALIDATE(m_FreeList[level].back == node);
12136 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must have empty free lists.
12142 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
12144 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
12150 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const
12152 for(uint32_t level = 0; level < m_LevelCount; ++level)
12154 if(m_FreeList[level].front != VMA_NULL)
12156 return LevelToNodeSize(level);
// Fills outInfo with allocation statistics by walking the node tree; the
// unusable tail of the block (size - m_UsableSize) is accounted as an extra
// unused range. NOTE(review): the stat-initialization and unusable-range
// accounting statements are elided in this extraction; verify upstream.
12162 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
12164 const VkDeviceSize unusableSize = GetUnusableSize();
12175 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
12177 if(unusableSize > 0)
// Accumulates this block's totals into pool-wide statistics. The unusable
// tail (non-power-of-two remainder) counts as unused space.
// NOTE(review): the unused-range-count statements are elided here.
12186 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const
12188 const VkDeviceSize unusableSize = GetUnusableSize();
12190 inoutStats.
size += GetSize();
12191 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
12196 if(unusableSize > 0)
12203 #if VMA_STATS_STRING_ENABLED
// Emits a JSON description of this block: summary statistics, then every
// node of the buddy tree, then the unusable tail as one big unused range.
12205 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const
12209 CalcAllocationStatInfo(stat);
12211 PrintDetailedMap_Begin(
12217 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
12219 const VkDeviceSize unusableSize = GetUnusableSize();
12220 if(unusableSize > 0)
12222 PrintDetailedMap_UnusedRange(json,
12227 PrintDetailedMap_End(json);
// Finds a free buddy node able to hold the requested allocation. Searches
// from the smallest fitting level (targetLevel) upward toward larger nodes,
// taking the first free node whose offset satisfies the alignment. The found
// level is smuggled to Alloc() through pAllocationRequest->customData.
12232 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
12233 uint32_t currentFrameIndex,
12234 uint32_t frameInUseCount,
12235 VkDeviceSize bufferImageGranularity,
12236 VkDeviceSize allocSize,
12237 VkDeviceSize allocAlignment,
12239 VmaSuballocationType allocType,
12240 bool canMakeOtherLost,
12242 VmaAllocationRequest* pAllocationRequest)
// Upper-address allocation is a linear-algorithm-only feature.
12244 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Simple way to respect bufferImageGranularity: for types whose linearity
// is unknown or optimal-tiled, inflate both alignment and size to it.
12248 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
12249 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
12250 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
12252 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
12253 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
// Cannot possibly fit in the usable (power-of-two) portion of the block.
12256 if(allocSize > m_UsableSize)
// Walk levels from targetLevel up to level 0 (larger nodes) looking for a
// suitably aligned free node.
12261 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
12262 for(uint32_t level = targetLevel + 1; level--; )
12264 for(Node* freeNode = m_FreeList[level].front;
12265 freeNode != VMA_NULL;
12266 freeNode = freeNode->free.next)
12268 if(freeNode->offset % allocAlignment == 0)
12270 pAllocationRequest->type = VmaAllocationRequestType::Normal;
12271 pAllocationRequest->offset = freeNode->offset;
12272 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
12273 pAllocationRequest->sumItemSize = 0;
12274 pAllocationRequest->itemsToMakeLostCount = 0;
// Remember which level the node was found at for Alloc().
12275 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm does not support lost allocations: succeed only when
// the request required no allocations to be made lost.
12284 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
12285 uint32_t currentFrameIndex,
12286 uint32_t frameInUseCount,
12287 VmaAllocationRequest* pAllocationRequest)
12293 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost allocations are unsupported by the buddy algorithm; this presumably
// returns 0 in the elided body - verify against upstream.
12296 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits an allocation planned by CreateAllocationRequest(): locates the
// chosen free node at the level stored in request.customData, splits it
// repeatedly (creating buddy pairs) until reaching the target level, then
// marks the final node as allocated and updates counters.
12305 void VmaBlockMetadata_Buddy::Alloc(
12306 const VmaAllocationRequest& request,
12307 VmaSuballocationType type,
12308 VkDeviceSize allocSize,
12311 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
12313 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
12314 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the specific free node at currLevel whose offset was promised.
12316 Node* currNode = m_FreeList[currLevel].front;
12317 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
12318 while(currNode->offset != request.offset)
12320 currNode = currNode->free.next;
12321 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split the node down to the target level, one level at a time.
12325 while(currLevel < targetLevel)
// The node being split leaves the free list; its two children enter the
// free list of the next (deeper) level.
12329 RemoveFromFreeList(currLevel, currNode);
12331 const uint32_t childrenLevel = currLevel + 1;
12334 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
12335 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
12337 leftChild->offset = currNode->offset;
12338 leftChild->type = Node::TYPE_FREE;
12339 leftChild->parent = currNode;
12340 leftChild->buddy = rightChild;
12342 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
12343 rightChild->type = Node::TYPE_FREE;
12344 rightChild->parent = currNode;
12345 rightChild->buddy = leftChild;
12348 currNode->type = Node::TYPE_SPLIT;
12349 currNode->split.leftChild = leftChild;
// Left child is pushed last so it becomes the list front and is picked next.
12352 AddToFreeListFront(childrenLevel, rightChild);
12353 AddToFreeListFront(childrenLevel, leftChild);
12358 currNode = m_FreeList[currLevel].front;
// Claim the node at the target level.
12367 VMA_ASSERT(currLevel == targetLevel &&
12368 currNode != VMA_NULL &&
12369 currNode->type == Node::TYPE_FREE);
12370 RemoveFromFreeList(currLevel, currNode);
12373 currNode->type = Node::TYPE_ALLOCATION;
12374 currNode->allocation.alloc = hAllocation;
12376 ++m_AllocationCount;
12378 m_SumFreeSize -= allocSize;
12381 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
12383 if(node->type == Node::TYPE_SPLIT)
12385 DeleteNode(node->split.leftChild->buddy);
12386 DeleteNode(node->split.leftChild);
12389 vma_delete(GetAllocationCallbacks(), node);
// Recursively validates one node of the buddy tree against its parent and
// buddy links, accumulating free/allocation statistics into ctx for the
// caller (Validate) to compare against cached counters.
12392 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const
12394 VMA_VALIDATE(level < m_LevelCount);
12395 VMA_VALIDATE(curr->parent == parent);
// Only the root has no buddy; buddies must point at each other.
12396 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
12397 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
12400 case Node::TYPE_FREE:
// A free node contributes its whole level size to the free total.
12402 ctx.calculatedSumFreeSize += levelNodeSize;
12403 ++ctx.calculatedFreeCount;
12405 case Node::TYPE_ALLOCATION:
12406 ++ctx.calculatedAllocationCount;
// Internal fragmentation: node size minus actual allocation size counts as free.
12407 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
12408 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
12410 case Node::TYPE_SPLIT:
// Recurse into both children; left child shares the parent's offset,
// right child is offset by half the node size.
12412 const uint32_t childrenLevel = level + 1;
12413 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
12414 const Node*
const leftChild = curr->split.leftChild;
12415 VMA_VALIDATE(leftChild != VMA_NULL);
12416 VMA_VALIDATE(leftChild->offset == curr->offset);
12417 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
12419 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
12421 const Node*
const rightChild = leftChild->buddy;
12422 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
12423 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
12425 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
12436 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const
12439 uint32_t level = 0;
12440 VkDeviceSize currLevelNodeSize = m_UsableSize;
12441 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
12442 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
12445 currLevelNodeSize = nextLevelNodeSize;
12446 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at the given offset: descends the buddy tree from the
// root following the offset, marks the leaf free, then merges it with its
// buddy back up the tree as long as the buddy is also free.
12451 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
// Descend from the root, choosing left/right child by offset.
12454 Node* node = m_Root;
12455 VkDeviceSize nodeOffset = 0;
12456 uint32_t level = 0;
12457 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
12458 while(node->type == Node::TYPE_SPLIT)
12460 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
12461 if(offset < nodeOffset + nextLevelSize)
12463 node = node->split.leftChild;
12467 node = node->split.leftChild->buddy;
12468 nodeOffset += nextLevelSize;
12471 levelNodeSize = nextLevelSize;
12474 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
// alloc may be VK_NULL_HANDLE when freeing by offset only.
12475 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
12478 --m_AllocationCount;
// NOTE(review): alloc->GetSize() is dereferenced here even though the
// assert above permits alloc == VK_NULL_HANDLE - confirm all callers pass
// a non-null alloc, or verify against upstream.
12479 m_SumFreeSize += alloc->GetSize();
12481 node->type = Node::TYPE_FREE;
// Merge with the buddy while both halves of a pair are free: delete both
// children and turn the parent back into a free node, moving up a level.
12484 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
12486 RemoveFromFreeList(level, node->buddy);
12487 Node*
const parent = node->parent;
12489 vma_delete(GetAllocationCallbacks(), node->buddy);
12490 vma_delete(GetAllocationCallbacks(), node);
12491 parent->type = Node::TYPE_FREE;
// Register the final merged node as free at its level.
12499 AddToFreeListFront(level, node);
// Recursive helper for CalcAllocationStatInfo: accounts one node - a free
// node as an unused range, an allocated node as a used range (plus its
// internal fragmentation as unused), a split node via its two children.
// NOTE(review): the actual stat-accumulation statements are elided in this
// extraction; only the structure remains.
12502 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const
12506 case Node::TYPE_FREE:
12512 case Node::TYPE_ALLOCATION:
12514 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Internal fragmentation within the node counts as an unused range.
12520 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
12521 if(unusedRangeSize > 0)
12530 case Node::TYPE_SPLIT:
12532 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
12533 const Node*
const leftChild = node->split.leftChild;
12534 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
12535 const Node*
const rightChild = leftChild->buddy;
12536 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
12544 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
12546 VMA_ASSERT(node->type == Node::TYPE_FREE);
12549 Node*
const frontNode = m_FreeList[level].front;
12550 if(frontNode == VMA_NULL)
12552 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
12553 node->free.prev = node->free.next = VMA_NULL;
12554 m_FreeList[level].front = m_FreeList[level].back = node;
12558 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
12559 node->free.prev = VMA_NULL;
12560 node->free.next = frontNode;
12561 frontNode->free.prev = node;
12562 m_FreeList[level].front = node;
// Unlinks a node from the intrusive doubly-linked free list of the given
// level, fixing up the level's front/back pointers as needed.
12566 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
12568 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Fix the forward link of the predecessor (or the list front).
12571 if(node->free.prev == VMA_NULL)
12573 VMA_ASSERT(m_FreeList[level].front == node);
12574 m_FreeList[level].front = node->free.next;
12578 Node*
const prevFreeNode = node->free.prev;
12579 VMA_ASSERT(prevFreeNode->free.next == node);
12580 prevFreeNode->free.next = node->free.next;
// Fix the backward link of the successor (or the list back).
12584 if(node->free.next == VMA_NULL)
12586 VMA_ASSERT(m_FreeList[level].back == node);
12587 m_FreeList[level].back = node->free.prev;
12591 Node*
const nextFreeNode = node->free.next;
12592 VMA_ASSERT(nextFreeNode->free.prev == node);
12593 nextFreeNode->free.prev = node->free.prev;
12597 #if VMA_STATS_STRING_ENABLED
// Recursive helper for PrintDetailedMap: emits one node as JSON - a free
// node as an unused range, an allocation (plus any internal-fragmentation
// remainder) as used+unused, and a split node via its two children.
12598 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const
12602 case Node::TYPE_FREE:
12603 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
12605 case Node::TYPE_ALLOCATION:
12607 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
12608 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report the padding inside the node as an unused range.
12609 if(allocSize < levelNodeSize)
12611 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
12615 case Node::TYPE_SPLIT:
12617 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
12618 const Node*
const leftChild = node->split.leftChild;
12619 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
12620 const Node*
const rightChild = leftChild->buddy;
12621 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
12634 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
12635 m_pMetadata(VMA_NULL),
12636 m_MemoryTypeIndex(UINT32_MAX),
12638 m_hMemory(VK_NULL_HANDLE),
12640 m_pMappedData(VMA_NULL)
12644 void VmaDeviceMemoryBlock::Init(
12647 uint32_t newMemoryTypeIndex,
12648 VkDeviceMemory newMemory,
12649 VkDeviceSize newSize,
12651 uint32_t algorithm)
12653 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
12655 m_hParentPool = hParentPool;
12656 m_MemoryTypeIndex = newMemoryTypeIndex;
12658 m_hMemory = newMemory;
12663 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
12666 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
12672 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
12674 m_pMetadata->Init(newSize);
12677 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
12681 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
12683 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
12684 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
12685 m_hMemory = VK_NULL_HANDLE;
12687 vma_delete(allocator, m_pMetadata);
12688 m_pMetadata = VMA_NULL;
12691 bool VmaDeviceMemoryBlock::Validate()
const
12693 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
12694 (m_pMetadata->GetSize() != 0));
12696 return m_pMetadata->Validate();
12699 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
12701 void* pData =
nullptr;
12702 VkResult res = Map(hAllocator, 1, &pData);
12703 if(res != VK_SUCCESS)
12708 res = m_pMetadata->CheckCorruption(pData);
12710 Unmap(hAllocator, 1);
12715 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
12722 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12723 if(m_MapCount != 0)
12725 m_MapCount += count;
12726 VMA_ASSERT(m_pMappedData != VMA_NULL);
12727 if(ppData != VMA_NULL)
12729 *ppData = m_pMappedData;
12735 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
12736 hAllocator->m_hDevice,
12742 if(result == VK_SUCCESS)
12744 if(ppData != VMA_NULL)
12746 *ppData = m_pMappedData;
12748 m_MapCount = count;
12754 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
12761 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12762 if(m_MapCount >= count)
12764 m_MapCount -= count;
12765 if(m_MapCount == 0)
12767 m_pMappedData = VMA_NULL;
12768 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
12773 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
12777 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12779 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12780 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12783 VkResult res = Map(hAllocator, 1, &pData);
12784 if(res != VK_SUCCESS)
12789 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
12790 VmaWriteMagicValue(pData, allocOffset + allocSize);
12792 Unmap(hAllocator, 1);
12797 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12799 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12800 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12803 VkResult res = Map(hAllocator, 1, &pData);
12804 if(res != VK_SUCCESS)
12809 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
12811 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
12813 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
12815 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
12818 Unmap(hAllocator, 1);
12823 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
12826 VkDeviceSize allocationLocalOffset,
12830 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12831 hAllocation->GetBlock() ==
this);
12832 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12833 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12834 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12836 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12837 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
12840 VkResult VmaDeviceMemoryBlock::BindImageMemory(
12843 VkDeviceSize allocationLocalOffset,
12847 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12848 hAllocation->GetBlock() ==
this);
12849 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12850 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12851 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12853 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12854 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
// NOTE(review): fragment of a stat-info initializer -- its signature and the
// rest of its body were lost in extraction. The visible line zeroes the
// VmaStatInfo structure; presumably min fields are set elsewhere -- confirm.
12859 memset(&outInfo, 0,
sizeof(outInfo));
// NOTE(review): only the signature of this helper survived extraction; it
// presumably finalizes averages in a VmaStatInfo after accumulation -- confirm.
12878 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
12886 VmaPool_T::VmaPool_T(
12889 VkDeviceSize preferredBlockSize) :
12893 createInfo.memoryTypeIndex,
12894 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
12895 createInfo.minBlockCount,
12896 createInfo.maxBlockCount,
12898 createInfo.frameInUseCount,
12899 createInfo.blockSize != 0,
12901 createInfo.priority,
12902 VMA_MAX(hAllocator->GetMemoryTypeMinAlignment(createInfo.memoryTypeIndex), createInfo.minAllocationAlignment),
12903 createInfo.pMemoryAllocateNext),
12909 VmaPool_T::~VmaPool_T()
12911 VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL);
12914 void VmaPool_T::SetName(
const char* pName)
12916 const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
12917 VmaFreeString(allocs, m_Name);
12919 if(pName != VMA_NULL)
12921 m_Name = VmaCreateStringCopy(allocs, pName);
12929 #if VMA_STATS_STRING_ENABLED
12933 VmaBlockVector::VmaBlockVector(
12936 uint32_t memoryTypeIndex,
12937 VkDeviceSize preferredBlockSize,
12938 size_t minBlockCount,
12939 size_t maxBlockCount,
12940 VkDeviceSize bufferImageGranularity,
12941 uint32_t frameInUseCount,
12942 bool explicitBlockSize,
12943 uint32_t algorithm,
12945 VkDeviceSize minAllocationAlignment,
12946 void* pMemoryAllocateNext) :
12947 m_hAllocator(hAllocator),
12948 m_hParentPool(hParentPool),
12949 m_MemoryTypeIndex(memoryTypeIndex),
12950 m_PreferredBlockSize(preferredBlockSize),
12951 m_MinBlockCount(minBlockCount),
12952 m_MaxBlockCount(maxBlockCount),
12953 m_BufferImageGranularity(bufferImageGranularity),
12954 m_FrameInUseCount(frameInUseCount),
12955 m_ExplicitBlockSize(explicitBlockSize),
12956 m_Algorithm(algorithm),
12957 m_Priority(priority),
12958 m_MinAllocationAlignment(minAllocationAlignment),
12959 m_pMemoryAllocateNext(pMemoryAllocateNext),
12960 m_HasEmptyBlock(false),
12961 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
12966 VmaBlockVector::~VmaBlockVector()
12968 for(
size_t i = m_Blocks.size(); i--; )
12970 m_Blocks[i]->Destroy(m_hAllocator);
12971 vma_delete(m_hAllocator, m_Blocks[i]);
12975 VkResult VmaBlockVector::CreateMinBlocks()
12977 for(
size_t i = 0; i < m_MinBlockCount; ++i)
12979 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
12980 if(res != VK_SUCCESS)
12988 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
12990 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12992 const size_t blockCount = m_Blocks.size();
13001 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13003 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
13004 VMA_ASSERT(pBlock);
13005 VMA_HEAVY_ASSERT(pBlock->Validate());
13006 pBlock->m_pMetadata->AddPoolStats(*pStats);
13010 bool VmaBlockVector::IsEmpty()
13012 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13013 return m_Blocks.empty();
13016 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const
13018 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13019 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
13020 (VMA_DEBUG_MARGIN > 0) &&
13022 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retries in AllocatePage's make-other-lost loop; prevents
// unbounded spinning when lost allocations keep reappearing.
13025 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
13027 VkResult VmaBlockVector::Allocate(
13028 uint32_t currentFrameIndex,
13030 VkDeviceSize alignment,
13032 VmaSuballocationType suballocType,
13033 size_t allocationCount,
13037 VkResult res = VK_SUCCESS;
13039 alignment = VMA_MAX(alignment, m_MinAllocationAlignment);
13041 if(IsCorruptionDetectionEnabled())
13043 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
13044 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
13048 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13049 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13051 res = AllocatePage(
13057 pAllocations + allocIndex);
13058 if(res != VK_SUCCESS)
13065 if(res != VK_SUCCESS)
13068 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13069 while(allocIndex--)
13071 VmaAllocation_T*
const alloc = pAllocations[allocIndex];
13072 const VkDeviceSize allocSize = alloc->GetSize();
13074 m_hAllocator->m_Budget.RemoveAllocation(heapIndex, allocSize);
13076 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single page (one VmaAllocation) from this block vector.
// Strategy: try existing blocks, then create a new block (halving its size a
// few times on failure), then optionally evict ("make lost") other
// allocations. NOTE(review): many interior lines (parameters, braces,
// strategy branches) were lost in extraction; comments below are hedged.
13082 VkResult VmaBlockVector::AllocatePage(
13083 uint32_t currentFrameIndex,
13085 VkDeviceSize alignment,
13087 VmaSuballocationType suballocType,
// Budget query: how much free memory remains in this block's heap.
13095 VkDeviceSize freeMemory;
13097 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13099 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
// Default pools may fall back to a dedicated allocation; custom pools may not.
13103 const bool canFallbackToDedicated = !IsCustomPool();
13104 const bool canCreateNewBlock =
13106 (m_Blocks.size() < m_MaxBlockCount) &&
13107 (freeMemory >= size || !canFallbackToDedicated);
13114 canMakeOtherLost =
false;
// Upper-address and other feature combinations unsupported here -> error out.
13118 if(isUpperAddress &&
13121 return VK_ERROR_FEATURE_NOT_PRESENT;
13135 return VK_ERROR_FEATURE_NOT_PRESENT;
// Request (plus both debug margins) larger than a whole block can never fit.
13139 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
13141 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13149 if(!canMakeOtherLost || canCreateNewBlock)
// 1. Fast path: the last block usually has the most free space.
13158 if(!m_Blocks.empty())
13160 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
13161 VMA_ASSERT(pCurrBlock);
13162 VkResult res = AllocateFromBlock(
13172 if(res == VK_SUCCESS)
13174 VMA_DEBUG_LOG(
" Returned from last block #%u", pCurrBlock->GetId());
// 2. Forward scan over all existing blocks (presumably best-fit strategy).
13184 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13186 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13187 VMA_ASSERT(pCurrBlock);
13188 VkResult res = AllocateFromBlock(
13198 if(res == VK_SUCCESS)
13200 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
// 3. Backward scan (presumably the alternate search order/strategy).
13208 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13210 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13211 VMA_ASSERT(pCurrBlock);
13212 VkResult res = AllocateFromBlock(
13222 if(res == VK_SUCCESS)
13224 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
// 4. Create a new block, starting at preferred size but preferring a smaller
// one when the pool is young (up to 3 halvings).
13232 if(canCreateNewBlock)
13235 VkDeviceSize newBlockSize = m_PreferredBlockSize;
13236 uint32_t newBlockSizeShift = 0;
13237 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
13239 if(!m_ExplicitBlockSize)
13242 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
13243 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
13245 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
13246 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
13248 newBlockSize = smallerNewBlockSize;
13249 ++newBlockSizeShift;
13258 size_t newBlockIndex = 0;
13259 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
13260 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
// On creation failure keep halving the block size while it still fits `size`.
13262 if(!m_ExplicitBlockSize)
13264 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
13266 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
13267 if(smallerNewBlockSize >= size)
13269 newBlockSize = smallerNewBlockSize;
13270 ++newBlockSizeShift;
13271 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
13272 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
13281 if(res == VK_SUCCESS)
13283 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
13284 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
13286 res = AllocateFromBlock(
13296 if(res == VK_SUCCESS)
13298 VMA_DEBUG_LOG(
" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
13304 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 5. Last resort: evict ("make lost") other allocations to create room.
// Bounded by VMA_ALLOCATION_TRY_COUNT retries.
13311 if(canMakeOtherLost)
13313 uint32_t tryIndex = 0;
13314 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
13316 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
13317 VmaAllocationRequest bestRequest = {};
13318 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan: pick the request with the lowest eviction cost.
13324 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13326 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13327 VMA_ASSERT(pCurrBlock);
13328 VmaAllocationRequest currRequest = {};
13329 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13332 m_BufferImageGranularity,
13341 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13342 if(pBestRequestBlock == VMA_NULL ||
13343 currRequestCost < bestRequestCost)
13345 pBestRequestBlock = pCurrBlock;
13346 bestRequest = currRequest;
13347 bestRequestCost = currRequestCost;
// Cost 0 means nothing must be evicted -- cannot do better, stop searching.
13349 if(bestRequestCost == 0)
// Backward scan variant of the same search.
13360 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13362 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13363 VMA_ASSERT(pCurrBlock);
13364 VmaAllocationRequest currRequest = {};
13365 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13368 m_BufferImageGranularity,
13377 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13378 if(pBestRequestBlock == VMA_NULL ||
13379 currRequestCost < bestRequestCost ||
13382 pBestRequestBlock = pCurrBlock;
13383 bestRequest = currRequest;
13384 bestRequestCost = currRequestCost;
13386 if(bestRequestCost == 0 ||
// Commit: map (if persistently mapped), evict the victims, then allocate.
13396 if(pBestRequestBlock != VMA_NULL)
13400 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
13401 if(res != VK_SUCCESS)
13407 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
13413 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13414 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
13415 UpdateHasEmptyBlock();
13416 (*pAllocation)->InitBlockAllocation(
13418 bestRequest.offset,
13425 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
13426 VMA_DEBUG_LOG(
" Returned from existing block");
13427 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
13428 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13429 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13431 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
13433 if(IsCorruptionDetectionEnabled())
13435 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
13436 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Retry budget exhausted: too many eviction attempts.
13451 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
13453 return VK_ERROR_TOO_MANY_OBJECTS;
13457 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13460 void VmaBlockVector::Free(
13463 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
13465 bool budgetExceeded =
false;
13467 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13469 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
13470 budgetExceeded = heapBudget.
usage >= heapBudget.
budget;
13475 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13477 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13479 if(IsCorruptionDetectionEnabled())
13481 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
13482 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
13485 if(hAllocation->IsPersistentMap())
13487 pBlock->Unmap(m_hAllocator, 1);
13490 pBlock->m_pMetadata->Free(hAllocation);
13491 VMA_HEAVY_ASSERT(pBlock->Validate());
13493 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
13495 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
13497 if(pBlock->m_pMetadata->IsEmpty())
13500 if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
13502 pBlockToDelete = pBlock;
13509 else if(m_HasEmptyBlock && canDeleteBlock)
13511 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
13512 if(pLastBlock->m_pMetadata->IsEmpty())
13514 pBlockToDelete = pLastBlock;
13515 m_Blocks.pop_back();
13519 UpdateHasEmptyBlock();
13520 IncrementallySortBlocks();
13525 if(pBlockToDelete != VMA_NULL)
13527 VMA_DEBUG_LOG(
" Deleted empty block");
13528 pBlockToDelete->Destroy(m_hAllocator);
13529 vma_delete(m_hAllocator, pBlockToDelete);
13533 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const
13535 VkDeviceSize result = 0;
13536 for(
size_t i = m_Blocks.size(); i--; )
13538 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
13539 if(result >= m_PreferredBlockSize)
13547 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
13549 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13551 if(m_Blocks[blockIndex] == pBlock)
13553 VmaVectorRemove(m_Blocks, blockIndex);
13560 void VmaBlockVector::IncrementallySortBlocks()
13565 for(
size_t i = 1; i < m_Blocks.size(); ++i)
13567 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
13569 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts to suballocate `size` bytes from one specific block: builds an
// allocation request via the block's metadata and, on success, commits it,
// initializes the VmaAllocation, updates the budget and debug features.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when the block has no suitable space.
// NOTE(review): several parameter lines and the flag-decoding prologue
// (mapped / isUserDataString, etc.) were lost in extraction -- comments hedged.
13576 VkResult VmaBlockVector::AllocateFromBlock(
13577 VmaDeviceMemoryBlock* pBlock,
13578 uint32_t currentFrameIndex,
13580 VkDeviceSize alignment,
13583 VmaSuballocationType suballocType,
// Ask the metadata whether (and where) the request fits in this block.
13592 VmaAllocationRequest currRequest = {};
13593 if(pBlock->m_pMetadata->CreateAllocationRequest(
13596 m_BufferImageGranularity,
// This path never evicts other allocations.
13606 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Presumably executed only for mapped allocations: take a map reference.
13610 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
13611 if(res != VK_SUCCESS)
// Commit the request and wire up the new allocation object.
13617 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13618 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
13619 UpdateHasEmptyBlock();
13620 (*pAllocation)->InitBlockAllocation(
13622 currRequest.offset,
13629 VMA_HEAVY_ASSERT(pBlock->Validate());
13630 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
13631 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13632 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13634 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
13636 if(IsCorruptionDetectionEnabled())
13638 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
13639 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
13643 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13646 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
13648 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
13649 allocInfo.pNext = m_pMemoryAllocateNext;
13650 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
13651 allocInfo.allocationSize = blockSize;
13653 #if VMA_BUFFER_DEVICE_ADDRESS
13655 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
13656 if(m_hAllocator->m_UseKhrBufferDeviceAddress)
13658 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
13659 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
13663 #if VMA_MEMORY_PRIORITY
13664 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
13665 if(m_hAllocator->m_UseExtMemoryPriority)
13667 priorityInfo.priority = m_Priority;
13668 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
13672 VkDeviceMemory mem = VK_NULL_HANDLE;
13673 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
13682 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
13688 allocInfo.allocationSize,
13692 m_Blocks.push_back(pBlock);
13693 if(pNewBlockIndex != VMA_NULL)
13695 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU: maps the involved blocks,
// invalidates/memmoves/flushes each region, then unmaps blocks that were
// mapped only for this purpose. NOTE(review): braces, the BlockInfo struct
// body and memmove call line were lost in extraction -- comments hedged.
13701 void VmaBlockVector::ApplyDefragmentationMovesCpu(
13702 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13703 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
13705 const size_t blockCount = m_Blocks.size();
13706 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
// Per-block bookkeeping flags for this pass.
13710 BLOCK_FLAG_USED = 0x00000001,
13711 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
13719 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
13720 blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
13721 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Mark every block that participates as source or destination of a move.
13724 const size_t moveCount = moves.size();
13725 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13727 const VmaDefragmentationMove& move = moves[moveIndex];
13728 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
13729 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
13732 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Map all used blocks; remember which ones we mapped ourselves.
13735 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13737 BlockInfo& currBlockInfo = blockInfo[blockIndex];
13738 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13739 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
13741 currBlockInfo.pMappedData = pBlock->GetMappedData();
// Block not already persistently mapped - map it just for defragmentation.
13743 if(currBlockInfo.pMappedData == VMA_NULL)
13745 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
13746 if(pDefragCtx->res == VK_SUCCESS)
13748 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Perform the copies. For non-coherent memory each region is invalidated on
// the source and flushed on the destination, aligned to nonCoherentAtomSize.
13755 if(pDefragCtx->res == VK_SUCCESS)
13757 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13758 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13760 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13762 const VmaDefragmentationMove& move = moves[moveIndex];
13764 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
13765 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
13767 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate the source range before reading it through the mapping.
13772 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
13773 memRange.memory = pSrcBlock->GetDeviceMemory();
13774 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
13775 memRange.size = VMA_MIN(
13776 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
13777 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
13778 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// The actual byte copy between the two mapped blocks (memmove dropped here).
13783 reinterpret_cast<char*
>(dstBlockInfo.pMappedData) + move.dstOffset,
13784 reinterpret_cast<char*
>(srcBlockInfo.pMappedData) + move.srcOffset,
13785 static_cast<size_t>(move.size));
// Re-stamp the magic margins around the moved allocation's new position.
13787 if(IsCorruptionDetectionEnabled())
13789 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
13790 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush the destination range so the device sees the new data.
13796 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
13797 memRange.memory = pDstBlock->GetDeviceMemory();
13798 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
13799 memRange.size = VMA_MIN(
13800 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
13801 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
13802 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Undo the temporary mappings, in reverse order.
13809 for(
size_t blockIndex = blockCount; blockIndex--; )
13811 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
13812 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
13814 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13815 pBlock->Unmap(m_hAllocator, 1);
// Records defragmentation moves into a command buffer: creates a temporary
// VkBuffer over each participating block, binds it at offset 0, and records
// vkCmdCopyBuffer for every move. Result becomes VK_NOT_READY because the
// copies only take effect after the command buffer executes.
// NOTE(review): braces and the VkBufferCopy field initializers were lost in
// extraction -- comments hedged.
13820 void VmaBlockVector::ApplyDefragmentationMovesGpu(
13821 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13822 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13823 VkCommandBuffer commandBuffer)
13825 const size_t blockCount = m_Blocks.size();
13827 pDefragCtx->blockContexts.resize(blockCount);
13828 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Mark blocks that participate in at least one move.
13831 const size_t moveCount = moves.size();
13832 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13834 const VmaDefragmentationMove& move = moves[moveIndex];
13839 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13840 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13844 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Create a transfer buffer spanning each used block and bind it at offset 0.
13848 VkBufferCreateInfo bufCreateInfo;
13849 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
13851 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13853 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
13854 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13855 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
13857 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
13858 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
13859 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
13860 if(pDefragCtx->res == VK_SUCCESS)
13862 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
13863 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Record one buffer-to-buffer copy per move.
13870 if(pDefragCtx->res == VK_SUCCESS)
13872 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13874 const VmaDefragmentationMove& move = moves[moveIndex];
13876 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
13877 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
13879 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
13881 VkBufferCopy region = {
13885 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
13886 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
// Copies are only recorded, not executed yet.
13891 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
13893 pDefragCtx->res = VK_NOT_READY;
// NOTE(review): the signature of this function was lost in extraction;
// based on its callers (DefragmentationEnd/CommitDefragmentations) it is
// presumably VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats*
// pDefragmentationStats) -- confirm. It destroys empty blocks above the
// minimum block count, crediting their size to the defragmentation stats.
13899 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13901 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13902 if(pBlock->m_pMetadata->IsEmpty())
// Never shrink below the pool's configured minimum number of blocks.
13904 if(m_Blocks.size() > m_MinBlockCount)
13906 if(pDefragmentationStats != VMA_NULL)
13909 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
13912 VmaVectorRemove(m_Blocks, blockIndex);
13913 pBlock->Destroy(m_hAllocator);
13914 vma_delete(m_hAllocator, pBlock);
13922 UpdateHasEmptyBlock();
13925 void VmaBlockVector::UpdateHasEmptyBlock()
13927 m_HasEmptyBlock =
false;
13928 for(
size_t index = 0, count = m_Blocks.size(); index < count; ++index)
13930 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[index];
13931 if(pBlock->m_pMetadata->IsEmpty())
13933 m_HasEmptyBlock =
true;
13939 #if VMA_STATS_STRING_ENABLED
// Serializes this block vector as JSON: pool-level properties (name, type
// index, block size/count, algorithm) followed by a per-block detailed map.
// NOTE(review): structural lines (braces, EndObject/EndString calls, and the
// custom-pool vs default-pool branch) were lost in extraction -- hedged.
13941 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
13943 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13945 json.BeginObject();
// Presumably this branch handles custom pools (which have a parent pool).
13949 const char* poolName = m_hParentPool->GetName();
13950 if(poolName != VMA_NULL && poolName[0] !=
'\0')
13952 json.WriteString(
"Name");
13953 json.WriteString(poolName);
13956 json.WriteString(
"MemoryTypeIndex");
13957 json.WriteNumber(m_MemoryTypeIndex);
13959 json.WriteString(
"BlockSize");
13960 json.WriteNumber(m_PreferredBlockSize);
13962 json.WriteString(
"BlockCount");
13963 json.BeginObject(
true);
13964 if(m_MinBlockCount > 0)
13966 json.WriteString(
"Min");
13967 json.WriteNumber((uint64_t)m_MinBlockCount);
13969 if(m_MaxBlockCount < SIZE_MAX)
13971 json.WriteString(
"Max");
13972 json.WriteNumber((uint64_t)m_MaxBlockCount);
13974 json.WriteString(
"Cur");
13975 json.WriteNumber((uint64_t)m_Blocks.size());
13978 if(m_FrameInUseCount > 0)
13980 json.WriteString(
"FrameInUseCount");
13981 json.WriteNumber(m_FrameInUseCount);
13984 if(m_Algorithm != 0)
13986 json.WriteString(
"Algorithm");
13987 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Presumably the else-branch for default (non-custom) pools.
13992 json.WriteString(
"PreferredBlockSize");
13993 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps, keyed by block id.
13996 json.WriteString(
"Blocks");
13997 json.BeginObject();
13998 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14000 json.BeginString();
14001 json.ContinueString(m_Blocks[i]->GetId());
14004 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs the defragmentation algorithm for this block vector, choosing CPU or
// GPU execution, then applies the planned moves. Decrements the caller's
// remaining byte/allocation budgets by what was actually moved.
// NOTE(review): the pStats/flags parameters, the isHostVisible condition of
// canDefragmentOnCpu, and several braces were lost in extraction -- hedged.
14013 void VmaBlockVector::Defragment(
14014 class VmaBlockVectorDefragmentationContext* pCtx,
14016 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
14017 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
14018 VkCommandBuffer commandBuffer)
14020 pCtx->res = VK_SUCCESS;
14022 const VkMemoryPropertyFlags memPropFlags =
14023 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
14024 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
// CPU path needs host-visible memory; GPU path needs this memory type to be
// enabled in the allocator's GPU-defragmentation mask.
14026 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
14028 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
14029 !IsCorruptionDetectionEnabled() &&
14030 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
14033 if(canDefragmentOnCpu || canDefragmentOnGpu)
14035 bool defragmentOnGpu;
// Exactly one path possible: take it. Otherwise prefer GPU for device-local
// memory (or on integrated GPUs).
14037 if(canDefragmentOnGpu != canDefragmentOnCpu)
14039 defragmentOnGpu = canDefragmentOnGpu;
14044 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
14045 m_hAllocator->IsIntegratedGpu();
14048 bool overlappingMoveSupported = !defragmentOnGpu;
// Presumably: incremental mode only try-locks; blocking mode locks outright.
14050 if(m_hAllocator->m_UseMutex)
14054 if(!m_Mutex.TryLockWrite())
14056 pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
14062 m_Mutex.LockWrite();
14063 pCtx->mutexLocked =
true;
14067 pCtx->Begin(overlappingMoveSupported, flags);
// Run the algorithm within the budget of the chosen path.
14071 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
14072 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
14073 pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);
// Account what was moved against the caller's remaining budgets.
14076 if(pStats != VMA_NULL)
14078 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
14079 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
14082 VMA_ASSERT(bytesMoved <= maxBytesToMove);
14083 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
14084 if(defragmentOnGpu)
14086 maxGpuBytesToMove -= bytesMoved;
14087 maxGpuAllocationsToMove -= allocationsMoved;
14091 maxCpuBytesToMove -= bytesMoved;
14092 maxCpuAllocationsToMove -= allocationsMoved;
// Presumably the incremental path: release the lock and report NOT_READY
// while moves remain to be processed by the caller.
14098 if(m_hAllocator->m_UseMutex)
14099 m_Mutex.UnlockWrite();
14101 if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
14102 pCtx->res = VK_NOT_READY;
// Non-incremental path: apply the moves immediately.
14107 if(pCtx->res >= VK_SUCCESS)
14109 if(defragmentOnGpu)
14111 ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
14115 ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
// Finishes a defragmentation pass: destroys the temporary VkBuffers created
// for the GPU path, frees now-empty blocks on success, and releases the
// write lock taken by Defragment(). NOTE(review): the flags/pStats
// parameters and the conditionals around re-locking were lost in extraction.
14121 void VmaBlockVector::DefragmentationEnd(
14122 class VmaBlockVectorDefragmentationContext* pCtx,
// Presumably this branch re-acquires the lock when Defragment() released it
// (incremental mode) -- confirm the surrounding condition.
14128 VMA_ASSERT(pCtx->mutexLocked ==
false);
14132 m_Mutex.LockWrite();
14133 pCtx->mutexLocked =
true;
14137 if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
// Destroy temporary buffers in reverse creation order.
14140 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
14142 VmaBlockDefragmentationContext &blockCtx = pCtx->blockContexts[blockIndex];
14143 if(blockCtx.hBuffer)
14145 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
14149 if(pCtx->res >= VK_SUCCESS)
14151 FreeEmptyBlocks(pStats);
14155 if(pCtx->mutexLocked)
14157 VMA_ASSERT(m_hAllocator->m_UseMutex);
14158 m_Mutex.UnlockWrite();
// Pops up to `maxMoves` pending defragmentation moves into the caller's
// pMove array (memory handle + destination offset per move) and advances the
// processed counter. Returns the number of moves written.
// NOTE(review): the pMove/maxMoves parameters, the pMove increment and the
// return statement were lost in extraction -- hedged.
14162 uint32_t VmaBlockVector::ProcessDefragmentations(
14163 class VmaBlockVectorDefragmentationContext *pCtx,
14166 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
// Clamp to the number of not-yet-processed moves.
14168 const uint32_t moveCount = VMA_MIN(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);
14170 for(uint32_t i = 0; i < moveCount; ++ i)
14172 VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];
14175 pMove->
memory = move.pDstBlock->GetDeviceMemory();
14176 pMove->
offset = move.dstOffset;
14181 pCtx->defragmentationMovesProcessed += moveCount;
14186 void VmaBlockVector::CommitDefragmentations(
14187 class VmaBlockVectorDefragmentationContext *pCtx,
14190 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14192 for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
14194 const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];
14196 move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
14197 move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
14200 pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
14201 FreeEmptyBlocks(pStats);
14204 size_t VmaBlockVector::CalcAllocationCount()
const
14207 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14209 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
14214 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const
14216 if(m_BufferImageGranularity == 1)
14220 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
14221 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
14223 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
14224 VMA_ASSERT(m_Algorithm == 0);
14225 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
14226 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
14234 void VmaBlockVector::MakePoolAllocationsLost(
14235 uint32_t currentFrameIndex,
14236 size_t* pLostAllocationCount)
14238 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14239 size_t lostAllocationCount = 0;
14240 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14242 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14243 VMA_ASSERT(pBlock);
14244 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
14246 if(pLostAllocationCount != VMA_NULL)
14248 *pLostAllocationCount = lostAllocationCount;
14252 VkResult VmaBlockVector::CheckCorruption()
14254 if(!IsCorruptionDetectionEnabled())
14256 return VK_ERROR_FEATURE_NOT_PRESENT;
14259 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
14260 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14262 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14263 VMA_ASSERT(pBlock);
14264 VkResult res = pBlock->CheckCorruption(m_hAllocator);
14265 if(res != VK_SUCCESS)
// Accumulates per-block allocation statistics into pStats: the global total,
// the bucket for this vector's memory type, and the bucket for its heap.
// NOTE(review): fragmented listing - the local VmaStatInfo declaration
// (allocationStatInfo) and brace lines are missing from this view.
14273 void VmaBlockVector::AddStats(
VmaStats* pStats)
14275 const uint32_t memTypeIndex = m_MemoryTypeIndex;
14276 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
14278 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
14280 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14282 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14283 VMA_ASSERT(pBlock);
14284 VMA_HEAVY_ASSERT(pBlock->Validate());
14286 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
// Fold this block's stats into all three aggregation buckets.
14287 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14288 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14289 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Builds one BlockInfo wrapper per memory block of the target vector and
// sorts them by block pointer so AddAllocation can binary-search by block.
// NOTE(review): fragmented listing - some initializer-list entries (e.g. the
// m_BytesMoved counter, visible elsewhere in this class) and brace lines are
// missing from this view.
14296 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
14298 VmaBlockVector* pBlockVector,
14299 uint32_t currentFrameIndex,
14300 bool overlappingMoveSupported) :
14301 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14302 m_AllocationCount(0),
14303 m_AllAllocations(false),
14305 m_AllocationsMoved(0),
14306 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
14309 const size_t blockCount = m_pBlockVector->m_Blocks.size();
14310 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
// One heap-allocated BlockInfo per block; freed in the destructor.
14312 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
14313 pBlockInfo->m_OriginalBlockIndex = blockIndex;
14314 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
14315 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer to enable binary search in AddAllocation.
14319 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
14322 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
14324 for(
size_t i = m_Blocks.size(); i--; )
14326 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers a single allocation as a candidate for defragmentation: finds
// the BlockInfo for the allocation's block (binary search over the sorted
// m_Blocks) and appends an AllocationInfo record to it.
// NOTE(review): fragmented listing - the else branch between the push_back
// and ++m_AllocationCount is missing from this view; braces also missing.
14330 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
// Lost allocations are skipped entirely.
14333 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
14335 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
14336 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
14337 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
14339 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
14340 (*it)->m_Allocations.push_back(allocInfo);
14347 ++m_AllocationCount;
// One round of the generic defragmentation algorithm: repeatedly takes the
// last allocation of the last (most-empty-first sorted) block and tries to
// re-place it in an earlier block / lower offset, recording each move and
// respecting the maxBytesToMove / maxAllocationsToMove budgets.
// NOTE(review): fragmented listing - brace lines, the main while-loop header,
// several statements and the returns are missing from this view; the control
// flow below cannot be fully reconstructed from what is visible.
14351 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
14352 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14353 VkDeviceSize maxBytesToMove,
14354 uint32_t maxAllocationsToMove,
14355 bool freeOldAllocations)
// Nothing to do without blocks.
14357 if(m_Blocks.empty())
14370 size_t srcBlockMinIndex = 0;
// Start from the last block / last allocation and walk backwards.
14383 size_t srcBlockIndex = m_Blocks.size() - 1;
14384 size_t srcAllocIndex = SIZE_MAX;
// Find the next source allocation, stepping to earlier blocks as they empty.
14390 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
14392 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
14395 if(srcBlockIndex == srcBlockMinIndex)
14402 srcAllocIndex = SIZE_MAX;
14407 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
14411 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
14412 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
14414 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
14415 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
14416 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
14417 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to and including the source block.
14420 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
14422 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
14423 VmaAllocationRequest dstAllocRequest;
14424 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
14425 m_CurrentFrameIndex,
14426 m_pBlockVector->GetFrameInUseCount(),
14427 m_pBlockVector->GetBufferImageGranularity(),
14434 &dstAllocRequest) &&
// Only accept placements that actually improve compaction.
14436 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
14438 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop the round when either budget would be exceeded.
14441 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
14442 (m_BytesMoved + size > maxBytesToMove))
14447 VmaDefragmentationMove move = {};
14448 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
14449 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
14450 move.srcOffset = srcOffset;
14451 move.dstOffset = dstAllocRequest.offset;
14453 move.hAllocation = allocInfo.m_hAllocation;
14454 move.pSrcBlock = pSrcBlockInfo->m_pBlock;
14455 move.pDstBlock = pDstBlockInfo->m_pBlock;
14457 moves.push_back(move);
// Reserve the space in the destination metadata immediately.
14459 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
14463 allocInfo.m_hAllocation);
// When moves are applied eagerly (not deferred to a pass), also free the
// source spot and rebind the allocation now.
14465 if(freeOldAllocations)
14467 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
14468 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
14471 if(allocInfo.m_pChanged != VMA_NULL)
14473 *allocInfo.m_pChanged = VK_TRUE;
14476 ++m_AllocationsMoved;
14477 m_BytesMoved += size;
14479 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous allocation / block for the next iteration.
14487 if(srcAllocIndex > 0)
14493 if(srcBlockIndex > 0)
14496 srcAllocIndex = SIZE_MAX;
14506 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const
14509 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14511 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm: populates per-block allocation lists
// (all allocations, or only those explicitly registered), sorts blocks for
// destination preference, then runs up to roundCount DefragmentRound passes.
// NOTE(review): fragmented listing - brace lines, the DefragmentRound call in
// the round loop, and the return are missing from this view.
14519 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
14520 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14521 VkDeviceSize maxBytesToMove,
14522 uint32_t maxAllocationsToMove,
// Nothing registered and not in "all allocations" mode: nothing to do.
14525 if(!m_AllAllocations && m_AllocationCount == 0)
14530 const size_t blockCount = m_Blocks.size();
14531 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14533 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
// In all-allocations mode, harvest every non-free suballocation from the
// generic metadata into this block's candidate list.
14535 if(m_AllAllocations)
14537 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
14538 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
14539 it != pMetadata->m_Suballocations.end();
14542 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
14544 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
14545 pBlockInfo->m_Allocations.push_back(allocInfo);
14550 pBlockInfo->CalcHasNonMovableAllocations();
// Sources are consumed from the back, so sort descending by offset.
14554 pBlockInfo->SortAllocationsByOffsetDescending();
// Prefer more suitable destination blocks first.
14560 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
14563 const uint32_t roundCount = 2;
14566 VkResult result = VK_SUCCESS;
14567 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
14575 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
14576 size_t dstBlockIndex, VkDeviceSize dstOffset,
14577 size_t srcBlockIndex, VkDeviceSize srcOffset)
14579 if(dstBlockIndex < srcBlockIndex)
14583 if(dstBlockIndex > srcBlockIndex)
14587 if(dstOffset < srcOffset)
// Fast algorithm constructor: only initializes members; block scanning happens
// in Defragment(). Requires VMA_DEBUG_MARGIN == 0 because the compaction logic
// assumes suballocations can be packed back-to-back.
// NOTE(review): fragmented listing - some initializer-list entries (e.g. the
// m_BytesMoved counter) and brace lines are missing from this view.
14597 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
14599 VmaBlockVector* pBlockVector,
14600 uint32_t currentFrameIndex,
14601 bool overlappingMoveSupported) :
14602 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14603 m_OverlappingMoveSupported(overlappingMoveSupported),
14604 m_AllocationCount(0),
14605 m_AllAllocations(false),
14607 m_AllocationsMoved(0),
14608 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
// Fast path is incompatible with debug margins around allocations.
14610 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Trivial destructor - members clean themselves up; nothing else is owned.
14614 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast defragmentation: strips free suballocations from all blocks
// (PreprocessMetadata), sorts blocks by ascending free space, then sweeps
// allocations front-to-back packing each one either into a previously
// registered free gap (FreeSpaceDatabase) or at the current write cursor
// (dstBlockInfoIndex/dstOffset). Metadata is rebuilt in PostprocessMetadata.
// NOTE(review): fragmented listing - brace lines, else keywords, the 'end'
// flag declaration, ++nextSuballocIt advances and the return are missing from
// this view; comments below describe only what the visible lines establish.
14618 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
14619 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14620 VkDeviceSize maxBytesToMove,
14621 uint32_t maxAllocationsToMove,
// The fast path assumes it sees every allocation in the vector.
14624 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount)
14626 const size_t blockCount = m_pBlockVector->GetBlockCount();
14627 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
// Temporarily removes FREE suballocations so only real allocations remain.
14632 PreprocessMetadata();
14636 m_BlockInfos.resize(blockCount);
14637 for(
size_t i = 0; i < blockCount; ++i)
14639 m_BlockInfos[i].origBlockIndex = i;
// Sort block order by ascending free size so fuller blocks are packed first.
14642 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
14643 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
14644 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Records gaps skipped by the cursor so later small allocations can fill them.
14649 FreeSpaceDatabase freeSpaceDb;
// Write cursor: current destination block and offset within it.
14651 size_t dstBlockInfoIndex = 0;
14652 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14653 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14654 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14655 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
14656 VkDeviceSize dstOffset = 0;
14659 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
14661 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
14662 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
14663 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
14664 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
14665 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
14667 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
14668 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
14669 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Stop the whole sweep once either budget is exhausted.
14670 if(m_AllocationsMoved == maxAllocationsToMove ||
14671 m_BytesMoved + srcAllocSize > maxBytesToMove)
14676 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
14678 VmaDefragmentationMove move = {};
// First preference: an earlier-registered free gap that fits.
14680 size_t freeSpaceInfoIndex;
14681 VkDeviceSize dstAllocOffset;
14682 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
14683 freeSpaceInfoIndex, dstAllocOffset))
14685 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
14686 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
14687 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Gap in the same block: move within the block (offset change only).
14690 if(freeSpaceInfoIndex == srcBlockInfoIndex)
14692 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14696 VmaSuballocation suballoc = *srcSuballocIt;
14697 suballoc.offset = dstAllocOffset;
14698 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
14699 m_BytesMoved += srcAllocSize;
14700 ++m_AllocationsMoved;
// Re-link the suballocation at its new position in the list.
14702 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14704 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14705 srcSuballocIt = nextSuballocIt;
14707 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14709 move.srcBlockIndex = srcOrigBlockIndex;
14710 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14711 move.srcOffset = srcAllocOffset;
14712 move.dstOffset = dstAllocOffset;
14713 move.size = srcAllocSize;
14715 moves.push_back(move);
// Gap in an earlier block: move the allocation across blocks.
14722 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
14724 VmaSuballocation suballoc = *srcSuballocIt;
14725 suballoc.offset = dstAllocOffset;
14726 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
14727 m_BytesMoved += srcAllocSize;
14728 ++m_AllocationsMoved;
14730 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14732 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14733 srcSuballocIt = nextSuballocIt;
14735 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14737 move.srcBlockIndex = srcOrigBlockIndex;
14738 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14739 move.srcOffset = srcAllocOffset;
14740 move.dstOffset = dstAllocOffset;
14741 move.size = srcAllocSize;
14743 moves.push_back(move);
// No gap found: place at the write cursor, aligned up.
14748 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance the cursor to the next block while the allocation does not fit,
// registering the tail of each full block as reusable free space.
14751 while(dstBlockInfoIndex < srcBlockInfoIndex &&
14752 dstAllocOffset + srcAllocSize > dstBlockSize)
14755 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
14757 ++dstBlockInfoIndex;
14758 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14759 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14760 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14761 dstBlockSize = pDstMetadata->GetSize();
14763 dstAllocOffset = 0;
// Cursor in the same block as the source allocation.
14767 if(dstBlockInfoIndex == srcBlockInfoIndex)
14769 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14771 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
14773 bool skipOver = overlap;
14774 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Overlapping same-block copies are only worthwhile when the shift is
// large enough relative to the allocation size (factor-of-64 heuristic).
14778 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
// Skip: leave the allocation in place; the gap before it becomes free space.
14783 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
14785 dstOffset = srcAllocOffset + srcAllocSize;
// Move within the block: just change the recorded offset.
14791 srcSuballocIt->offset = dstAllocOffset;
14792 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
14793 dstOffset = dstAllocOffset + srcAllocSize;
14794 m_BytesMoved += srcAllocSize;
14795 ++m_AllocationsMoved;
14798 move.srcBlockIndex = srcOrigBlockIndex;
14799 move.dstBlockIndex = dstOrigBlockIndex;
14800 move.srcOffset = srcAllocOffset;
14801 move.dstOffset = dstAllocOffset;
14802 move.size = srcAllocSize;
14804 moves.push_back(move);
// Cursor in an earlier block: move the allocation across blocks.
14812 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
14813 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
14815 VmaSuballocation suballoc = *srcSuballocIt;
14816 suballoc.offset = dstAllocOffset;
14817 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
14818 dstOffset = dstAllocOffset + srcAllocSize;
14819 m_BytesMoved += srcAllocSize;
14820 ++m_AllocationsMoved;
14822 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14824 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14825 srcSuballocIt = nextSuballocIt;
// Destination list stays sorted because the cursor only moves forward.
14827 pDstMetadata->m_Suballocations.push_back(suballoc);
14829 move.srcBlockIndex = srcOrigBlockIndex;
14830 move.dstBlockIndex = dstOrigBlockIndex;
14831 move.srcOffset = srcAllocOffset;
14832 move.dstOffset = dstAllocOffset;
14833 move.size = srcAllocSize;
14835 moves.push_back(move);
14841 m_BlockInfos.clear();
// Re-insert FREE suballocations and rebuild the free-list indices.
14843 PostprocessMetadata();
// Strips all FREE suballocations from every block's metadata so the sweep in
// Defragment() only sees real allocations. Resets free counters; the free
// structures are rebuilt afterwards by PostprocessMetadata().
// NOTE(review): fragmented listing - brace lines, the ++nextIt advance, the
// else ++it branch and counters updates are missing from this view.
14848 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
14850 const size_t blockCount = m_pBlockVector->GetBlockCount();
14851 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14853 VmaBlockMetadata_Generic*
const pMetadata =
14854 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
// Temporarily pretend the whole block is free; fixed up in postprocess.
14855 pMetadata->m_FreeCount = 0;
14856 pMetadata->m_SumFreeSize = pMetadata->GetSize();
14857 pMetadata->m_FreeSuballocationsBySize.clear();
14858 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14859 it != pMetadata->m_Suballocations.end(); )
14861 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
14863 VmaSuballocationList::iterator nextIt = it;
14865 pMetadata->m_Suballocations.erase(it);
// Rebuilds consistent metadata after the fast sweep: re-inserts FREE
// suballocations into the gaps between (and after) the moved allocations,
// restores m_FreeCount / m_SumFreeSize, and re-sorts the by-size free list.
// NOTE(review): fragmented listing - brace lines, the aggregate-initializer
// fields of the FREE VmaSuballocation literals, and the sort call's VMA_SORT
// line are missing from this view.
14876 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
14878 const size_t blockCount = m_pBlockVector->GetBlockCount();
14879 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14881 VmaBlockMetadata_Generic*
const pMetadata =
14882 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14883 const VkDeviceSize blockSize = pMetadata->GetSize();
// Block emptied entirely: one FREE suballocation spanning the whole block.
14886 if(pMetadata->m_Suballocations.empty())
14888 pMetadata->m_FreeCount = 1;
14890 VmaSuballocation suballoc = {
14894 VMA_SUBALLOCATION_TYPE_FREE };
14895 pMetadata->m_Suballocations.push_back(suballoc);
14896 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Otherwise walk the (offset-sorted) allocations, inserting a FREE entry
// in front of each gap.
14901 VkDeviceSize offset = 0;
14902 VmaSuballocationList::iterator it;
14903 for(it = pMetadata->m_Suballocations.begin();
14904 it != pMetadata->m_Suballocations.end();
// Preprocess removed all FREE entries; only real allocations remain here.
14907 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
14908 VMA_ASSERT(it->offset >= offset);
14911 if(it->offset > offset)
14913 ++pMetadata->m_FreeCount;
14914 const VkDeviceSize freeSize = it->offset - offset;
14915 VmaSuballocation suballoc = {
14919 VMA_SUBALLOCATION_TYPE_FREE };
14920 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// Small free ranges are intentionally kept out of the by-size index.
14921 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14923 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
14927 pMetadata->m_SumFreeSize -= it->size;
14928 offset = it->offset + it->size;
// Trailing free space after the last allocation.
14932 if(offset < blockSize)
14934 ++pMetadata->m_FreeCount;
14935 const VkDeviceSize freeSize = blockSize - offset;
14936 VmaSuballocation suballoc = {
14940 VMA_SUBALLOCATION_TYPE_FREE };
14941 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
14942 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): '>' here vs '>=' above looks inconsistent - confirm against
// upstream VMA before changing; kept byte-identical.
14943 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14945 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
// Restore the by-size ordering invariant of the free list.
14950 pMetadata->m_FreeSuballocationsBySize.begin(),
14951 pMetadata->m_FreeSuballocationsBySize.end(),
14952 VmaSuballocationItemSizeLess());
14955 VMA_HEAVY_ASSERT(pMetadata->Validate());
14959 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
14962 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14963 while(it != pMetadata->m_Suballocations.end())
14965 if(it->offset < suballoc.offset)
14970 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context: plain member initialization only.
// The algorithm object (m_pAlgorithm) is created later in Begin().
// NOTE(review): fragmented listing - the 'res' initializer and brace lines
// are missing from this view.
14976 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
14979 VmaBlockVector* pBlockVector,
14980 uint32_t currFrameIndex) :
14982 mutexLocked(false),
14983 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
14984 defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
14985 defragmentationMovesProcessed(0),
14986 defragmentationMovesCommitted(0),
14987 hasDefragmentationPlan(0),
14988 m_hAllocator(hAllocator),
14989 m_hCustomPool(hCustomPool),
14990 m_pBlockVector(pBlockVector),
14991 m_CurrFrameIndex(currFrameIndex),
14992 m_pAlgorithm(VMA_NULL),
14993 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
14994 m_AllAllocations(false)
14998 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
15000 vma_delete(m_hAllocator, m_pAlgorithm);
15003 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
15005 AllocInfo info = { hAlloc, pChanged };
15006 m_Allocations.push_back(info);
// Chooses and constructs the defragmentation algorithm for this vector:
// the Fast algorithm when its preconditions hold (no debug margin, no
// buffer/image-granularity conflicts, all allocations included), otherwise
// the Generic one; then feeds it either everything or the queued allocations.
// NOTE(review): fragmented listing - parts of the condition at 15025-15030,
// the else keyword and brace lines are missing from this view.
15009 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported,
VmaDefragmentationFlags flags)
15011 const bool allAllocations = m_AllAllocations ||
15012 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
// Fast algorithm preconditions (partially visible here).
15025 if(VMA_DEBUG_MARGIN == 0 &&
15027 !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
15030 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
15031 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
15035 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
15036 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
15041 m_pAlgorithm->AddAll();
// Explicit-allocation mode: hand each queued allocation to the algorithm.
15045 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
15047 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context: stores parameters and zeroes the
// per-memory-type default-pool context table. Custom-pool contexts are
// created lazily in AddPools/AddAllocations.
// NOTE(review): fragmented listing - the flags/pStats parameters and their
// member initializers are missing from this view.
15055 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
15057 uint32_t currFrameIndex,
15060 m_hAllocator(hAllocator),
15061 m_CurrFrameIndex(currFrameIndex),
15064 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
// No default-pool context exists until an allocation of that type is added.
15066 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Finishes defragmentation on every per-vector context (custom pools first,
// then default pools) and destroys the contexts.
// NOTE(review): fragmented listing - brace lines are missing from this view.
15069 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
15071 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15073 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
// DefragmentationEnd unmaps/frees per-block resources and unlocks the vector.
15074 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
15075 vma_delete(m_hAllocator, pBlockVectorCtx);
15077 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
15079 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
// Default-pool slots are sparse; only visit the ones actually created.
15080 if(pBlockVectorCtx)
15082 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
15083 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation: for each pool using the
// default algorithm, finds or creates its per-vector context and marks it to
// include all allocations.
// NOTE(review): fragmented listing - the duplicate-found break, some
// constructor arguments and brace lines are missing from this view.
15088 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
const VmaPool* pPools)
15090 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15092 VmaPool pool = pPools[poolIndex];
// Pools with a non-default algorithm (e.g. linear) are not defragmented.
15095 if(pool->m_BlockVector.GetAlgorithm() == 0)
15097 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Reuse an existing context for this pool if one was already created.
15099 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15101 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
15103 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
15108 if(!pBlockVectorDefragCtx)
15110 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15113 &pool->m_BlockVector,
15115 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Whole-pool registration means every allocation participates.
15118 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Block allocations
// that are not lost are routed to the per-vector context of either their
// custom pool or their memory type's default pool (contexts created lazily),
// together with the caller's optional per-allocation "changed" flag slot.
// NOTE(review): fragmented listing - the pAllocations parameter, the hAlloc
// local, the else branch header for default pools, loop breaks and brace
// lines are missing from this view.
15123 void VmaDefragmentationContext_T::AddAllocations(
15124 uint32_t allocationCount,
15126 VkBool32* pAllocationsChanged)
15129 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
15132 VMA_ASSERT(hAlloc);
// Only block (not dedicated) and not-lost allocations can be moved.
15134 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
15136 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
15138 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
15140 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation lives in a custom pool.
15142 if(hAllocPool != VK_NULL_HANDLE)
// Non-default-algorithm pools are excluded, as in AddPools.
15145 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
15147 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15149 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
15151 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
15155 if(!pBlockVectorDefragCtx)
15157 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15160 &hAllocPool->m_BlockVector,
15162 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation lives in a default pool, keyed by memory type index.
15169 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
15170 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
15171 if(!pBlockVectorDefragCtx)
15173 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15176 m_hAllocator->m_pBlockVectors[memTypeIndex],
15178 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
15182 if(pBlockVectorDefragCtx)
// The caller's flag slot is optional; pass VMA_NULL when absent.
15184 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
15185 &pAllocationsChanged[allocIndex] : VMA_NULL;
15186 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Main defragmentation driver. Two visible modes:
// - Immediate: runs Defragment() on every default-pool and custom-pool
//   context with the CPU/GPU budgets, stopping on the first failure.
// - Incremental (pass-based, lower half): lazily builds each context's
//   defragmentation plan, then drains moves into the caller's pass-move
//   array via ProcessDefragmentations until movesLeft is exhausted.
// NOTE(review): fragmented listing - the pInfo/pStats parameters, flag
// dispatch between the two modes, movesLeft/pCurrentMove initialization,
// several Defragment() arguments and the returns are missing from this view.
15192 VkResult VmaDefragmentationContext_T::Defragment(
15193 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
15194 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// Budgets are remembered for later incremental passes.
15206 m_MaxCpuBytesToMove = maxCpuBytesToMove;
15207 m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
15209 m_MaxGpuBytesToMove = maxGpuBytesToMove;
15210 m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;
// All budgets zero: nothing can ever be moved.
15212 if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
15213 m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
15216 return VK_NOT_READY;
// Without a command buffer no GPU-side copies are possible.
15219 if(commandBuffer == VK_NULL_HANDLE)
15221 maxGpuBytesToMove = 0;
15222 maxGpuAllocationsToMove = 0;
15225 VkResult res = VK_SUCCESS;
// Immediate mode: default pools first...
15228 for(uint32_t memTypeIndex = 0;
15229 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
15232 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15233 if(pBlockVectorCtx)
15235 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15236 pBlockVectorCtx->GetBlockVector()->Defragment(
15239 maxCpuBytesToMove, maxCpuAllocationsToMove,
15240 maxGpuBytesToMove, maxGpuAllocationsToMove,
15242 if(pBlockVectorCtx->res != VK_SUCCESS)
15244 res = pBlockVectorCtx->res;
// ...then custom pools.
15250 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15251 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
15254 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15255 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15256 pBlockVectorCtx->GetBlockVector()->Defragment(
15259 maxCpuBytesToMove, maxCpuAllocationsToMove,
15260 maxGpuBytesToMove, maxGpuAllocationsToMove,
15262 if(pBlockVectorCtx->res != VK_SUCCESS)
15264 res = pBlockVectorCtx->res;
// Incremental mode: build plans lazily and drain moves - default pools...
15277 for(uint32_t memTypeIndex = 0;
15278 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15281 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15282 if(pBlockVectorCtx)
15284 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
// First pass for this context: compute its full defragmentation plan.
15286 if(!pBlockVectorCtx->hasDefragmentationPlan)
15288 pBlockVectorCtx->GetBlockVector()->Defragment(
15291 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15292 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
// Planning failure: skip this context (continuation missing in this view).
15295 if(pBlockVectorCtx->res < VK_SUCCESS)
15298 pBlockVectorCtx->hasDefragmentationPlan =
true;
// Hand out as many of this context's moves as fit in the caller's array.
15301 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15303 pCurrentMove, movesLeft);
15305 movesLeft -= processed;
15306 pCurrentMove += processed;
// ...then custom pools, same lazy-plan + drain pattern.
15311 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15312 customCtxIndex < customCtxCount;
15315 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15316 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15318 if(!pBlockVectorCtx->hasDefragmentationPlan)
15320 pBlockVectorCtx->GetBlockVector()->Defragment(
15323 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15324 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
15327 if(pBlockVectorCtx->res < VK_SUCCESS)
15330 pBlockVectorCtx->hasDefragmentationPlan =
true;
15333 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15335 pCurrentMove, movesLeft);
15337 movesLeft -= processed;
15338 pCurrentMove += processed;
// Ends an incremental defragmentation pass: commits the processed moves of
// every planned context (default pools, then custom pools). Returns
// VK_NOT_READY while any context still lacks a plan or has uncommitted
// moves remaining, VK_SUCCESS once everything is committed.
// NOTE(review): fragmented listing - brace lines and the final return are
// missing from this view.
15345 VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
15347 VkResult res = VK_SUCCESS;
15350 for(uint32_t memTypeIndex = 0;
15351 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15354 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15355 if(pBlockVectorCtx)
15357 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
// A context without a plan means DefragmentPassBegin has not run for it.
15359 if(!pBlockVectorCtx->hasDefragmentationPlan)
15361 res = VK_NOT_READY;
15365 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15366 pBlockVectorCtx, m_pStats);
// More moves remain than were processed this pass: caller must run another.
15368 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15369 res = VK_NOT_READY;
15374 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15375 customCtxIndex < customCtxCount;
15378 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15379 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15381 if(!pBlockVectorCtx->hasDefragmentationPlan)
15383 res = VK_NOT_READY;
15387 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15388 pBlockVectorCtx, m_pStats);
15390 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15391 res = VK_NOT_READY;
15400 #if VMA_RECORDING_ENABLED
// VmaRecorder constructor plus (fragmented) Init(): opens the CSV recording
// file - fopen_s on Windows, fopen elsewhere - and writes the format header.
// NOTE(review): fragmented listing - the Init() signature, remaining ctor
// initializers, error checks around the fopen results, the #else/#endif
// preprocessor lines and the return are missing from this view.
15402 VmaRecorder::VmaRecorder() :
15406 m_RecordingStartTime(std::chrono::high_resolution_clock::now())
// --- Init(const VmaRecordSettings& settings, bool useMutex) body below ---
15412 m_UseMutex = useMutex;
15413 m_Flags = settings.
flags;
// Windows: use the secure fopen_s variant.
15415 #if defined(_WIN32)
15417 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
15421 return VK_ERROR_INITIALIZATION_FAILED;
// Other platforms: plain fopen.
15425 m_File = fopen(settings.
pFilePath,
"wb");
15429 return VK_ERROR_INITIALIZATION_FAILED;
// CSV header: file magic and format version.
15434 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
15435 fprintf(m_File,
"%s\n",
"1,8");
// Closes the recording file if Init() opened one.
// NOTE(review): the fclose call inside the if is missing from this
// fragmented view.
15440 VmaRecorder::~VmaRecorder()
15442 if(m_File != VMA_NULL)
// Appends a vmaCreateAllocator CSV row (thread id, timestamp, frame index).
// NOTE(review): fragmented listing - the Flush() call and braces are missing.
15448 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
15450 CallParams callParams;
15451 GetBasicParams(callParams);
// File writes are serialized when the allocator was created thread-safe.
15453 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15454 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Appends a vmaDestroyAllocator CSV row (thread id, timestamp, frame index).
15458 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
15460 CallParams callParams;
15461 GetBasicParams(callParams);
15463 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15464 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Body fragment of RecordCreatePool: appends a vmaCreatePool CSV row.
// NOTE(review): the function signature (frameIndex, createInfo, pool) and the
// fprintf argument list at 15475+ are missing from this fragmented view.
15470 CallParams callParams;
15471 GetBasicParams(callParams);
15473 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15474 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaDestroyPool CSV row identifying the pool by pointer.
// NOTE(review): the pool argument line of the fprintf is missing from this
// fragmented view.
15485 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
15487 CallParams callParams;
15488 GetBasicParams(callParams);
15490 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15491 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaAllocateMemory CSV row: memory requirements, create-info
// fields and the resulting allocation handle plus stringified user data.
// NOTE(review): fragmented listing - the createInfo/allocation parameters and
// several fprintf arguments (flags, usage, pool, allocation) are missing.
15496 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
15497 const VkMemoryRequirements& vkMemReq,
15501 CallParams callParams;
15502 GetBasicParams(callParams);
15503 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// Converts pUserData to a printable string honoring the string-user-data flag.
15505 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15506 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15508 vkMemReq.alignment,
15509 vkMemReq.memoryTypeBits,
15517 userDataStr.GetString());
// Appends a vmaAllocateMemoryPages CSV row; the variable-length allocation
// handle list is emitted via PrintPointerList, then user data is appended.
// NOTE(review): fragmented listing - the createInfo/pAllocations parameters
// and several fprintf arguments are missing from this view.
15521 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
15522 const VkMemoryRequirements& vkMemReq,
15524 uint64_t allocationCount,
15527 CallParams callParams;
15528 GetBasicParams(callParams);
15530 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15531 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
// Note: no trailing newline in this format string - the pointer list and
// user-data suffix complete the row below.
15532 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
15534 vkMemReq.alignment,
15535 vkMemReq.memoryTypeBits,
15542 PrintPointerList(allocationCount, pAllocations);
15543 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Appends a vmaAllocateMemoryForBuffer CSV row, including the dedicated-
// allocation hints reported by the implementation.
// NOTE(review): fragmented listing - createInfo/allocation parameters and
// several fprintf arguments are missing from this view.
15547 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
15548 const VkMemoryRequirements& vkMemReq,
15549 bool requiresDedicatedAllocation,
15550 bool prefersDedicatedAllocation,
15554 CallParams callParams;
15555 GetBasicParams(callParams);
15557 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15558 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15559 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15561 vkMemReq.alignment,
15562 vkMemReq.memoryTypeBits,
// Booleans serialized as 0/1 for the CSV format.
15563 requiresDedicatedAllocation ? 1 : 0,
15564 prefersDedicatedAllocation ? 1 : 0,
15572 userDataStr.GetString());
15576 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
15577 const VkMemoryRequirements& vkMemReq,
15578 bool requiresDedicatedAllocation,
15579 bool prefersDedicatedAllocation,
15583 CallParams callParams;
15584 GetBasicParams(callParams);
15586 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15587 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15588 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15590 vkMemReq.alignment,
15591 vkMemReq.memoryTypeBits,
15592 requiresDedicatedAllocation ? 1 : 0,
15593 prefersDedicatedAllocation ? 1 : 0,
15601 userDataStr.GetString());
15605 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
15608 CallParams callParams;
15609 GetBasicParams(callParams);
15611 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15612 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
15617 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
15618 uint64_t allocationCount,
15621 CallParams callParams;
15622 GetBasicParams(callParams);
15624 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15625 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
15626 PrintPointerList(allocationCount, pAllocations);
15627 fprintf(m_File,
"\n");
// --- VmaRecorder CSV trace writers (part 2) ---
// Same pattern as above: one CSV line per recorded VMA entry point, written to
// m_File under the m_FileMutex lock. Lines are elided in this extract.

// Logs vmaSetAllocationUserData; the user data is rendered via UserDataString.
15631 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
15633 const void* pUserData)
15635 CallParams callParams;
15636 GetBasicParams(callParams);
15638 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15639 UserDataString userDataStr(
15642 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15644 userDataStr.GetString());

// Logs vmaCreateLostAllocation (allocation handle only).
15648 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
15651 CallParams callParams;
15652 GetBasicParams(callParams);
15654 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15655 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,

// Logs vmaMapMemory (allocation handle only).
15660 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
15663 CallParams callParams;
15664 GetBasicParams(callParams);
15666 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15667 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,

// Logs vmaUnmapMemory (allocation handle only).
15672 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
15675 CallParams callParams;
15676 GetBasicParams(callParams);
15678 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15679 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,

// Logs vmaFlushAllocation with the flushed byte range (offset, size).
15684 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
15685 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15687 CallParams callParams;
15688 GetBasicParams(callParams);
15690 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15691 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,

// Logs vmaInvalidateAllocation with the invalidated byte range.
15698 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
15699 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15701 CallParams callParams;
15702 GetBasicParams(callParams);
15704 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15705 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,

// Logs vmaCreateBuffer: selected VkBufferCreateInfo fields followed by
// VmaAllocationCreateInfo fields and the user-data string.
15712 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
15713 const VkBufferCreateInfo& bufCreateInfo,
15717 CallParams callParams;
15718 GetBasicParams(callParams);
15720 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15721 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15722 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15723 bufCreateInfo.flags,
15724 bufCreateInfo.size,
15725 bufCreateInfo.usage,
15726 bufCreateInfo.sharingMode,
15727 allocCreateInfo.
flags,
15728 allocCreateInfo.
usage,
15732 allocCreateInfo.
pool,
15734 userDataStr.GetString());

// Logs vmaCreateImage: the full VkImageCreateInfo field set (type, format,
// extent, mips, layers, samples, tiling, usage, sharing, layout) followed by
// the VmaAllocationCreateInfo fields and the user-data string.
15738 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
15739 const VkImageCreateInfo& imageCreateInfo,
15743 CallParams callParams;
15744 GetBasicParams(callParams);
15746 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15747 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15748 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15749 imageCreateInfo.flags,
15750 imageCreateInfo.imageType,
15751 imageCreateInfo.format,
15752 imageCreateInfo.extent.width,
15753 imageCreateInfo.extent.height,
15754 imageCreateInfo.extent.depth,
15755 imageCreateInfo.mipLevels,
15756 imageCreateInfo.arrayLayers,
15757 imageCreateInfo.samples,
15758 imageCreateInfo.tiling,
15759 imageCreateInfo.usage,
15760 imageCreateInfo.sharingMode,
15761 imageCreateInfo.initialLayout,
15762 allocCreateInfo.
flags,
15763 allocCreateInfo.
usage,
15767 allocCreateInfo.
pool,
15769 userDataStr.GetString());
// --- VmaRecorder CSV trace writers (part 3) ---

// Logs vmaDestroyBuffer (allocation handle only).
15773 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
15776 CallParams callParams;
15777 GetBasicParams(callParams);
15779 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15780 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,

// Logs vmaDestroyImage (allocation handle only).
15785 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
15788 CallParams callParams;
15789 GetBasicParams(callParams);
15791 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15792 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,

// Logs vmaTouchAllocation (allocation handle only).
15797 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
15800 CallParams callParams;
15801 GetBasicParams(callParams);
15803 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15804 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,

// Logs vmaGetAllocationInfo (allocation handle only).
15809 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
15812 CallParams callParams;
15813 GetBasicParams(callParams);
15815 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15816 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,

// Logs vmaMakePoolAllocationsLost (pool handle only).
15821 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
15824 CallParams callParams;
15825 GetBasicParams(callParams);
15827 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15828 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,

// Logs vmaDefragmentationBegin: the line is assembled from several fprintf
// calls (prefix, then list separators, then the VmaDefragmentationInfo2
// numeric fields). The list-printing calls between the fragments are elided
// in this extract.
15833 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
15837 CallParams callParams;
15838 GetBasicParams(callParams);
15840 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15841 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
15844 fprintf(m_File,
",");
15846 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
15856 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
// Logs vmaDefragmentationEnd (context handle only).
15859 CallParams callParams;
15860 GetBasicParams(callParams);
15862 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15863 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,

// Logs vmaSetPoolName; a null name is recorded as an empty string.
15868 void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
15872 CallParams callParams;
15873 GetBasicParams(callParams);
15875 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15876 fprintf(m_File,
"%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15877 pool, name != VMA_NULL ? name :
"");

// Fragment of VmaRecorder::UserDataString's constructor (signature elided in
// this extract): when pUserData is non-null it is either kept as a C string
// (m_Str) or formatted as a pointer into the fixed 17-char m_PtrStr buffer.
// NOTE(review): the branch condition selecting between the two cases is not
// visible here -- presumably it tests the USER_DATA_COPY_STRING flag; confirm
// against the full source.
15883 if(pUserData != VMA_NULL)
15887 m_Str = (
const char*)pUserData;
15892 snprintf(m_PtrStr, 17,
"%p", pUserData);
// Writes the "Config,Begin" .. "Config,End" header section of the recording
// file: Vulkan API version, physical-device identity and limits, the full
// memory heap/type tables, which relevant extensions were enabled, and the
// values of VMA's compile-time debug macros. Pure sequential fprintf -- no
// locking here (callers appear to invoke this during recorder setup).
15902 void VmaRecorder::WriteConfiguration(
15903 const VkPhysicalDeviceProperties& devProps,
15904 const VkPhysicalDeviceMemoryProperties& memProps,
15905 uint32_t vulkanApiVersion,
15906 bool dedicatedAllocationExtensionEnabled,
15907 bool bindMemory2ExtensionEnabled,
15908 bool memoryBudgetExtensionEnabled,
15909 bool deviceCoherentMemoryExtensionEnabled)
15911 fprintf(m_File,
"Config,Begin\n");
15913 fprintf(m_File,
"VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));
// Physical-device identity.
15915 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
15916 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
15917 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
15918 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
15919 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
15920 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits that influence VMA's allocation decisions.
15922 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
15923 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
15924 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heap table (size and flags per heap).
15926 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
15927 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
15929 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
15930 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Memory type table (heap index and property flags per type).
15932 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
15933 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
15935 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
15936 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
// Which optional extensions were enabled when the allocator was created.
15939 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
15940 fprintf(m_File,
"Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
15941 fprintf(m_File,
"Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
15942 fprintf(m_File,
"Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);
// Compile-time configuration macros, so a replayer can reproduce behavior.
15944 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
15945 fprintf(m_File,
"Macro,VMA_MIN_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_MIN_ALIGNMENT);
15946 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
15947 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
15948 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
15949 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
15950 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
15951 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
15952 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
15954 fprintf(m_File,
"Config,End\n");
15957 void VmaRecorder::GetBasicParams(CallParams& outParams)
15959 #if defined(_WIN32)
15960 outParams.threadId = GetCurrentThreadId();
15965 std::thread::id thread_id = std::this_thread::get_id();
15966 std::stringstream thread_id_to_string_converter;
15967 thread_id_to_string_converter << thread_id;
15968 std::string thread_id_as_string = thread_id_to_string_converter.str();
15969 outParams.threadId =
static_cast<uint32_t
>(std::stoi(thread_id_as_string.c_str()));
15972 auto current_time = std::chrono::high_resolution_clock::now();
15974 outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
15977 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
15981 fprintf(m_File,
"%p", pItems[0]);
15982 for(uint64_t i = 1; i < count; ++i)
15984 fprintf(m_File,
" %p", pItems[i]);
// Flushes the recording file stream. NOTE(review): the body is elided in this
// extract.
15989 void VmaRecorder::Flush()

// Pool allocator for VmaAllocation_T objects: wraps a VmaPoolAllocator
// (m_Allocator, created with a first-block capacity of 1024) behind m_Mutex.
16002 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
16003 m_Allocator(pAllocationCallbacks, 1024)

// Allocates one VmaAllocation object, perfect-forwarding the constructor
// arguments to the pooled VmaAllocation_T constructor; thread-safe via m_Mutex.
16007 template<
typename... Types>
VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
16009 VmaMutexLock mutexLock(m_Mutex);
16010 return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);

// Returns an allocation object to the pool; thread-safe via m_Mutex.
16013 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
16015 VmaMutexLock mutexLock(m_Mutex);
16016 m_Allocator.Free(hAlloc);
// VmaAllocator_T constructor (signature line elided in this extract; the
// visible text starts inside the member-initializer list). Overall flow:
// initialize members from pCreateInfo, validate that requested features are
// compiled in, zero the function/property tables, query device properties,
// apply per-heap size limits, create one VmaBlockVector per memory type, and
// optionally start the recorder.
16024 m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
16031 m_hDevice(pCreateInfo->device),
16032 m_hInstance(pCreateInfo->instance),
16033 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
16034 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
16035 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
16036 m_AllocationObjectAllocator(&m_AllocationCallbacks),
16037 m_HeapSizeLimitMask(0),
16038 m_DeviceMemoryCount(0),
16039 m_PreferredLargeHeapBlockSize(0),
16040 m_PhysicalDevice(pCreateInfo->physicalDevice),
16041 m_CurrentFrameIndex(0),
16042 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
16044 m_GlobalMemoryTypeBits(UINT32_MAX)
16046 ,m_pRecorder(VMA_NULL)
// On Vulkan >= 1.1 the KHR extensions are superseded by core functionality,
// so the per-extension flags are cleared.
16049 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16051 m_UseKhrDedicatedAllocation =
false;
16052 m_UseKhrBindMemory2 =
false;
// Corruption detection writes a uint32_t pattern into the debug margin, so
// the margin must be a multiple of 4 bytes.
16055 if(VMA_DEBUG_DETECT_CORRUPTION)
16058 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Compile-time feature gates: assert if the caller requested a feature that
// was disabled by preprocessor macros or an unavailable Vulkan header.
16063 if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
16065 #if !(VMA_DEDICATED_ALLOCATION)
16068 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
16071 #if !(VMA_BIND_MEMORY2)
16074 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
16078 #if !(VMA_MEMORY_BUDGET)
16081 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
16084 #if !(VMA_BUFFER_DEVICE_ADDRESS)
16085 if(m_UseKhrBufferDeviceAddress)
16087 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
16090 #if VMA_VULKAN_VERSION < 1002000
16091 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
16093 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
16096 #if VMA_VULKAN_VERSION < 1001000
16097 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16099 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
16102 #if !(VMA_MEMORY_PRIORITY)
16103 if(m_UseExtMemoryPriority)
16105 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
// Zero all POD member tables before they are filled in.
16109 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
16110 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
16111 memset(&m_MemProps, 0,
sizeof(m_MemProps));
16113 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
16114 memset(&m_VulkanFunctions, 0,
sizeof(m_VulkanFunctions));
// Query device properties and the memory heap/type tables.
16126 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
16127 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Alignment-related constants must all be powers of two.
16129 VMA_ASSERT(VmaIsPow2(VMA_MIN_ALIGNMENT));
16130 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
16131 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
16132 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
16137 m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
// Apply caller-provided per-heap size limits (VK_WHOLE_SIZE = unlimited):
// record the limited heaps in m_HeapSizeLimitMask and clamp the reported
// heap size. NOTE(review): the null-check on pHeapSizeLimit is elided here.
16142 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
16144 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
16145 if(limit != VK_WHOLE_SIZE)
16147 m_HeapSizeLimitMask |= 1u << heapIndex;
16148 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
16150 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create one VmaBlockVector per memory type (most constructor arguments are
// elided in this extract).
16156 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16158 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
16160 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
16164 preferredBlockSize,
16167 GetBufferImageGranularity(),
16172 GetMemoryTypeMinAlignment(memTypeIndex),
16181 VkResult res = VK_SUCCESS;
// Optional call recorder: only available when compiled with
// VMA_RECORDING_ENABLED; otherwise requesting it is a hard error.
16186 #if VMA_RECORDING_ENABLED
16187 m_pRecorder = vma_new(
this, VmaRecorder)();
16189 if(res != VK_SUCCESS)
16193 m_pRecorder->WriteConfiguration(
16194 m_PhysicalDeviceProperties,
16196 m_VulkanApiVersion,
16197 m_UseKhrDedicatedAllocation,
16198 m_UseKhrBindMemory2,
16199 m_UseExtMemoryBudget,
16200 m_UseAmdDeviceCoherentMemory);
16201 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
16203 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
16204 return VK_ERROR_FEATURE_NOT_PRESENT;
// Seed the budget data from VK_EXT_memory_budget when enabled.
16208 #if VMA_MEMORY_BUDGET
16209 if(m_UseExtMemoryBudget)
16211 UpdateVulkanBudget();
// Destructor: records the destroy event and deletes the recorder (when
// recording is compiled in), asserts that no pools or dedicated allocations
// leaked, and destroys the per-memory-type block vectors in reverse order.
16218 VmaAllocator_T::~VmaAllocator_T()
16220 #if VMA_RECORDING_ENABLED
16221 if(m_pRecorder != VMA_NULL)
16223 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
16224 vma_delete(
this, m_pRecorder);
16228 VMA_ASSERT(m_Pools.IsEmpty());
// Reverse iteration: memTypeIndex-- runs from count-1 down to 0.
16230 for(
size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
16232 if(!m_DedicatedAllocations[memTypeIndex].IsEmpty())
16234 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
16237 vma_delete(
this, m_pBlockVectors[memTypeIndex]);
// Populates m_VulkanFunctions in layered priority order: statically linked
// entry points first (if VMA_STATIC_VULKAN_FUNCTIONS), then caller-supplied
// pointers (which override), then dynamic fetching of anything still null
// (if VMA_DYNAMIC_VULKAN_FUNCTIONS), and finally validation that every
// required pointer is set.
16241 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
16243 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
16244 ImportVulkanFunctions_Static();
16247 if(pVulkanFunctions != VMA_NULL)
16249 ImportVulkanFunctions_Custom(pVulkanFunctions);
16252 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
16253 ImportVulkanFunctions_Dynamic();
16256 ValidateVulkanFunctions();
// --- Static import: copy the statically linked Vulkan 1.0 entry points into
// m_VulkanFunctions; on Vulkan >= 1.1 also take the core "2" variants for the
// *KHR member slots.
16259 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
16261 void VmaAllocator_T::ImportVulkanFunctions_Static()
16264 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
16265 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
16266 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
16267 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
16268 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
16269 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
16270 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
16271 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
16272 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
16273 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
16274 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
16275 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
16276 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
16277 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
16278 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
16279 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
16280 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
// Vulkan 1.1 core promotions of the KHR extension entry points.
16283 #if VMA_VULKAN_VERSION >= 1001000
16284 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16286 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
16287 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
16288 m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
16289 m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
16290 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;

// --- Custom import: overwrite any slot for which the caller supplied a
// non-null pointer; null entries leave the previously imported value intact.
16297 void VmaAllocator_T::ImportVulkanFunctions_Custom(
const VmaVulkanFunctions* pVulkanFunctions)
16299 VMA_ASSERT(pVulkanFunctions != VMA_NULL);
16301 #define VMA_COPY_IF_NOT_NULL(funcName) \
16302 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
16304 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
16305 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
16306 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
16307 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
16308 VMA_COPY_IF_NOT_NULL(vkMapMemory);
16309 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
16310 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
16311 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
16312 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
16313 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
16314 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
16315 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
16316 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
16317 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
16318 VMA_COPY_IF_NOT_NULL(vkCreateImage);
16319 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
16320 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
16322 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16323 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
16324 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
16327 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16328 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
16329 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
16332 #if VMA_MEMORY_BUDGET
16333 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
16336 #undef VMA_COPY_IF_NOT_NULL

// --- Dynamic import: for each still-null slot, fetch the entry point with
// vkGetInstanceProcAddr (instance-level) or vkGetDeviceProcAddr (device-level).
16339 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
16341 void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
16343 #define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
16344 if(m_VulkanFunctions.memberName == VMA_NULL) \
16345 m_VulkanFunctions.memberName = \
16346 (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
16347 #define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
16348 if(m_VulkanFunctions.memberName == VMA_NULL) \
16349 m_VulkanFunctions.memberName = \
16350 (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);
16352 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties,
"vkGetPhysicalDeviceProperties");
16353 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties,
"vkGetPhysicalDeviceMemoryProperties");
16354 VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory,
"vkAllocateMemory");
16355 VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory,
"vkFreeMemory");
16356 VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory,
"vkMapMemory");
16357 VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory,
"vkUnmapMemory");
16358 VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges,
"vkFlushMappedMemoryRanges");
16359 VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges,
"vkInvalidateMappedMemoryRanges");
16360 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory,
"vkBindBufferMemory");
16361 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory,
"vkBindImageMemory");
16362 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements,
"vkGetBufferMemoryRequirements");
16363 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements,
"vkGetImageMemoryRequirements");
16364 VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer,
"vkCreateBuffer");
16365 VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer,
"vkDestroyBuffer");
16366 VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage,
"vkCreateImage");
16367 VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage,
"vkDestroyImage");
16368 VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer,
"vkCmdCopyBuffer");
// On Vulkan >= 1.1, fetch the core (non-KHR-suffixed) names.
16370 #if VMA_VULKAN_VERSION >= 1001000
16371 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16373 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2,
"vkGetBufferMemoryRequirements2");
16374 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2,
"vkGetImageMemoryRequirements2");
16375 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2,
"vkBindBufferMemory2");
16376 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2,
"vkBindImageMemory2");
16377 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2,
"vkGetPhysicalDeviceMemoryProperties2");
// Pre-1.1: fetch the KHR-suffixed extension entry points, gated on the
// corresponding extension actually being enabled.
16381 #if VMA_DEDICATED_ALLOCATION
16382 if(m_UseKhrDedicatedAllocation)
16384 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR,
"vkGetBufferMemoryRequirements2KHR");
16385 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR,
"vkGetImageMemoryRequirements2KHR");
16389 #if VMA_BIND_MEMORY2
16390 if(m_UseKhrBindMemory2)
16392 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR,
"vkBindBufferMemory2KHR");
16393 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR,
"vkBindImageMemory2KHR");
16397 #if VMA_MEMORY_BUDGET
16398 if(m_UseExtMemoryBudget)
16400 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR,
"vkGetPhysicalDeviceMemoryProperties2KHR");
16404 #undef VMA_FETCH_DEVICE_FUNC
16405 #undef VMA_FETCH_INSTANCE_FUNC
// Asserts that every Vulkan entry point the allocator may call is non-null
// after import: all Vulkan 1.0 functions unconditionally, and the extension /
// 1.1 "2"-suffixed functions only when the corresponding feature is in use.
16410 void VmaAllocator_T::ValidateVulkanFunctions()
16412 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
16413 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
16414 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
16415 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
16416 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
16417 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
16418 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
16419 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
16420 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
16421 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
16422 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
16423 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
16424 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
16425 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
16426 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
16427 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
16428 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
// Conditionally required: dedicated allocation (ext or core 1.1).
16430 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16431 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
16433 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
16434 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
// Conditionally required: bind_memory2 (ext or core 1.1).
16438 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16439 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
16441 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
16442 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
// Conditionally required: memory budget queries (ext or core 1.1).
16446 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
16447 if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16449 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
16454 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
16456 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16457 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
16458 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
16459 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
// Allocates `allocationCount` allocations of `size` from a single memory type:
// tries dedicated VkDeviceMemory first when preferred (debug flag, explicit
// request, or size > half the preferred block size) and no custom pool is
// used; otherwise allocates from the type's block vector, falling back to a
// dedicated allocation on failure. NOTE(review): several branches and call
// arguments are elided in this extract.
16462 VkResult VmaAllocator_T::AllocateMemoryOfType(
16464 VkDeviceSize alignment,
16465 bool dedicatedAllocation,
16466 VkBuffer dedicatedBuffer,
16467 VkBufferUsageFlags dedicatedBufferUsage,
16468 VkImage dedicatedImage,
16470 uint32_t memTypeIndex,
16471 VmaSuballocationType suballocType,
16472 size_t allocationCount,
16475 VMA_ASSERT(pAllocations != VMA_NULL);
16476 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// Mapping-related flags are only meaningful for HOST_VISIBLE memory types
// (the surrounding condition is elided here).
16482 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16492 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
16493 VMA_ASSERT(blockVector);
// Heuristic: prefer a dedicated VkDeviceMemory when forced by debug config,
// explicitly requested, or when the request exceeds half a block.
16495 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
16496 bool preferDedicatedMemory =
16497 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
16498 dedicatedAllocation ||
16500 size > preferredBlockSize / 2;
16502 if(preferDedicatedMemory &&
16504 finalCreateInfo.
pool == VK_NULL_HANDLE)
16513 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16517 return AllocateDedicatedMemory(
16527 dedicatedBufferUsage,
// Primary path: sub-allocate from the memory type's block vector.
16535 VkResult res = blockVector->Allocate(
16536 m_CurrentFrameIndex.load(),
16543 if(res == VK_SUCCESS)
16551 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback to dedicated memory, but avoid exhausting the implementation's
// allocation count: bail out above 3/4 of maxMemoryAllocationCount.
16557 if(m_DeviceMemoryCount.load() > m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount * 3 / 4)
16559 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16562 res = AllocateDedicatedMemory(
16572 dedicatedBufferUsage,
16576 if(res == VK_SUCCESS)
16579 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
16585 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates `allocationCount` dedicated VkDeviceMemory allocations of the
// given memory type, building the pNext chain (dedicated-allocation info,
// buffer-device-address flags, memory priority) as enabled features require.
// On partial failure, frees everything already allocated (all-or-nothing).
// NOTE(review): several lines of this function are elided in this excerpt.
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool isUserDataString,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    size_t allocationCount,
    VMA_ASSERT(allocationCount > 0 && pAllocations);
    // Reject up front if the whole request would exceed the heap budget.
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    GetBudget(&heapBudget, heapIndex, 1);
    if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    // Chain VkMemoryDedicatedAllocateInfoKHR when the extension / Vulkan 1.1
    // is available and the allocation is tied to a specific buffer or image.
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
        if(dedicatedBuffer != VK_NULL_HANDLE)
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE); // buffer and image are mutually exclusive
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        else if(dedicatedImage != VK_NULL_HANDLE)
            dedicatedAllocInfo.image = dedicatedImage;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
#if VMA_BUFFER_DEVICE_ADDRESS
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_UseKhrBufferDeviceAddress)
        bool canContainBufferWithDeviceAddress = true;
        if(dedicatedBuffer != VK_NULL_HANDLE)
            // UINT32_MAX means "usage unknown" — conservatively assume it may
            // need a device address.
            canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX ||
                (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
        else if(dedicatedImage != VK_NULL_HANDLE)
            canContainBufferWithDeviceAddress = false;
        if(canContainBufferWithDeviceAddress)
            allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
            VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
#if VMA_MEMORY_PRIORITY
    VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
    if(m_UseExtMemoryPriority)
        priorityInfo.priority = priority;
        VmaPnextChainPushFront(&allocInfo, &priorityInfo);
    // Allocate all pages; stop at the first failure.
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        res = AllocateDedicatedMemoryPage(
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
    if(res == VK_SUCCESS)
        // Success: register all new allocations in the per-type dedicated list.
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            dedicatedAllocations.PushBack(pAllocations[allocIndex]);
        VMA_DEBUG_LOG(" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
        // Failure: roll back allocations made so far (allocIndex points past
        // the last successful one).
        while(allocIndex--)
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(currAlloc);
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
// Allocates a single dedicated VkDeviceMemory page, optionally maps it, and
// wraps it in a VmaAllocation_T. Frees the memory again if mapping fails.
// NOTE(review): some lines of this function are elided in this excerpt.
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool isUserDataString,
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
        VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
    void* pMappedData = VMA_NULL;
        // Persistently map when requested (elided condition above).
        res = (*m_VulkanFunctions.vkMapMemory)(
            VMA_DEBUG_LOG(" vkMapMemory FAILED");
            // Mapping failed: release the freshly allocated memory.
            FreeVulkanMemory(memTypeIndex, size, hMemory);
    // Construct the allocation object and account it in the budget.
    *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer, using
// vkGetBufferMemoryRequirements2KHR (to also learn dedicated-allocation
// preference) when VK_KHR_dedicated_allocation / Vulkan 1.1 is available,
// otherwise the plain Vulkan 1.0 query with both flags reported as false.
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;
        // Chain VkMemoryDedicatedRequirementsKHR to receive the dedicated
        // allocation hints along with the base requirements.
        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
        // Vulkan 1.0 fallback: no dedicated-allocation information available.
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with chained dedicated-requirements when
// available, otherwise the plain Vulkan 1.0 query.
void VmaAllocator_T::GetImageMemoryRequirements(
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;
        // Chain dedicated-requirements query alongside base requirements.
        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
        // Vulkan 1.0 fallback: dedicated-allocation hints unavailable.
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
// Top-level allocation entry point: validates the create-info flag
// combinations, dispatches to a custom pool's block vector when a pool is
// specified, otherwise iterates candidate memory types (best first),
// retrying with the next type on failure.
// NOTE(review): many lines of this function are elided in this excerpt.
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    // Pre-zero outputs so callers see null handles on any failure path.
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
    if(vkMemReq.size == 0)
        return VK_ERROR_VALIDATION_FAILED_EXT;
    // Mutually exclusive / invalid flag combinations are programming errors.
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    if(requiresDedicatedAllocation)
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        if(createInfo.pool != VK_NULL_HANDLE)
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    if((createInfo.pool != VK_NULL_HANDLE) &&
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    if(createInfo.pool != VK_NULL_HANDLE)
        // Custom pool path: allocate directly from the pool's block vector.
        (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.alignment,
    // Default pools path: try the best memory type first.
    uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
    uint32_t memTypeIndex = UINT32_MAX;
    if(res == VK_SUCCESS)
        res = AllocateMemoryOfType(
            vkMemReq.alignment,
            requiresDedicatedAllocation || prefersDedicatedAllocation,
            dedicatedBufferUsage,
        if(res == VK_SUCCESS)
        // Exclude the failed type and retry with the next candidate.
        memoryTypeBits &= ~(1u << memTypeIndex);
        if(res == VK_SUCCESS)
            res = AllocateMemoryOfType(
                vkMemReq.alignment,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBufferUsage,
            if(res == VK_SUCCESS)
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an array of allocations in reverse order, skipping null handles.
// Block allocations are returned to their owning block vector (custom pool
// or default per-type vector); dedicated allocations free their memory.
// NOTE(review): some lines are elided in this excerpt.
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    VMA_ASSERT(pAllocations);
    for(size_t allocIndex = allocationCount; allocIndex--; )
        if(allocation != VK_NULL_HANDLE)
            // TouchAllocation returns false for lost allocations; only live
            // ones are actually freed from their block vector.
            if(TouchAllocation(allocation))
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                switch(allocation->GetType())
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    VmaBlockVector* pBlockVector = VMA_NULL;
                    VmaPool hPool = allocation->GetBlock()->GetParentPool();
                    if(hPool != VK_NULL_HANDLE)
                        pBlockVector = &hPool->m_BlockVector;
                        // Null parent pool means a default per-type block vector.
                        const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                        pBlockVector = m_pBlockVectors[memTypeIndex];
                    pBlockVector->Free(allocation);
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
            // Always update budget and destroy the allocation object, even
            // if the allocation was lost.
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(allocation);
// Aggregates statistics across default block vectors, custom pools, and
// dedicated allocations into *pStats, then post-processes the totals
// (e.g. averages). NOTE(review): some lines are elided in this excerpt.
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
    // Initialize all stat buckets to empty.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    // Default per-memory-type block vectors.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    // Custom pools (guarded by the pools mutex).
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
            pool->m_BlockVector.AddStats(pStats);
    // Dedicated allocations, per memory type.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
            alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
            alloc->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    // Finalize derived values (averages etc.) in every bucket.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
// Fills `heapCount` VmaBudget entries starting at heap `firstHeap`.
// With VK_EXT_memory_budget enabled, returns cached driver-reported values
// (refetching via UpdateVulkanBudget after 30+ allocation operations);
// otherwise estimates usage from tracked block bytes and budget as 80% of
// heap size. NOTE(review): some lines are elided in this excerpt.
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
            // Cached values are fresh enough; combine the driver snapshot
            // with block-byte deltas accumulated since the last fetch.
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
                const uint32_t heapIndex = firstHeap + i;
                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                    // Usage would underflow; clamp to zero.
                    outBudget->usage = 0;
                // Budget can never exceed the physical heap size.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            // Cache is stale: refetch from the driver, then recurse once.
            UpdateVulkanBudget();
            GetBudget(outBudget, firstHeap, heapCount);
        // Extension unavailable: estimate from tracked block bytes.
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            const uint32_t heapIndex = firstHeap + i;
            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            // Heuristic: assume 80% of the heap is usable.
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
// 4098 == 0x1002, the PCI vendor ID of AMD.
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Creates a defragmentation context, registers the requested allocations,
// and runs the first Defragment step. The context is destroyed immediately
// unless the result is VK_NOT_READY (i.e. more passes are needed).
// NOTE(review): some lines are elided in this excerpt.
VkResult VmaAllocator_T::DefragmentationBegin(
    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);
    (*pContext)->AddAllocations(
    VkResult res = (*pContext)->Defragment(
    if(res != VK_NOT_READY)
        // Finished (or failed) in one shot — no context to hand back.
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
// Destroys a defragmentation context previously returned by
// DefragmentationBegin.
VkResult VmaAllocator_T::DefragmentationEnd(
    vma_delete(this, context);
// Starts one incremental defragmentation pass; delegates to the context.
VkResult VmaAllocator_T::DefragmentationPassBegin(
    return context->DefragmentPassBegin(pInfo);
// Finishes the current incremental defragmentation pass; delegates to the
// context.
VkResult VmaAllocator_T::DefragmentationPassEnd(
    return context->DefragmentPassEnd();
    // NOTE(review): body of VmaAllocator_T::GetAllocationInfo — the function
    // signature is elided in this excerpt. Fills *pAllocationInfo; for
    // lost-capable allocations it also "touches" the allocation by advancing
    // its last-use frame index via compare-exchange.
    if(hAllocation->CanBecomeLost())
        // Lock-free touch loop: retry until the frame index is updated or the
        // allocation is observed as lost / already current.
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            // Allocation is lost: report null memory but keep size/userdata.
            pAllocationInfo->offset = 0;
            pAllocationInfo->size = hAllocation->GetSize();
            pAllocationInfo->pUserData = hAllocation->GetUserData();
        else if(localLastUseFrameIndex == localCurrFrameIndex)
            // Already touched this frame: report full info.
            pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
            pAllocationInfo->deviceMemory = hAllocation->GetMemory();
            pAllocationInfo->offset = hAllocation->GetOffset();
            pAllocationInfo->size = hAllocation->GetSize();
            pAllocationInfo->pUserData = hAllocation->GetUserData();
            if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                localLastUseFrameIndex = localCurrFrameIndex;
#if VMA_STATS_STRING_ENABLED
        // Stats builds additionally touch non-lost allocations so the JSON
        // dump reflects accurate last-use frames.
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                    localLastUseFrameIndex = localCurrFrameIndex;
        // Non-lost allocation: report everything, including mapped pointer.
        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. Returns false when a
// lost-capable allocation has been lost; true otherwise. Uses the same
// compare-exchange touch loop as GetAllocationInfo.
// NOTE(review): some lines (returns/braces) are elided in this excerpt.
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
    if(hAllocation->CanBecomeLost())
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        else if(localLastUseFrameIndex == localCurrFrameIndex)
            if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                localLastUseFrameIndex = localCurrFrameIndex;
#if VMA_STATS_STRING_ENABLED
        // Stats builds also record the touch for non-lost allocations.
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
        if(localLastUseFrameIndex == localCurrFrameIndex)
            if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                localLastUseFrameIndex = localCurrFrameIndex;
    // NOTE(review): body of VmaAllocator_T::CreatePool — the signature is
    // elided in this excerpt. Validates the create info, constructs the pool
    // with a preferred block size derived from its memory type, creates its
    // minimum blocks, assigns an ID, and registers it in m_Pools.
    VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);
        return VK_ERROR_INITIALIZATION_FAILED;
    // Reject memory types excluded by the global memory-type mask.
    ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
        return VK_ERROR_FEATURE_NOT_PRESENT;
    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);
    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
        // Pre-allocating the minimum blocks failed; destroy the half-built pool.
        vma_delete(this, *pPool);
    // Register the pool under the pools mutex.
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        m_Pools.PushBack(*pPool);
// Unregisters the pool from m_Pools (under the pools mutex) and destroys it.
void VmaAllocator_T::DestroyPool(VmaPool pool)
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        m_Pools.Remove(pool);
    vma_delete(this, pool);
    // NOTE(review): body of VmaAllocator_T::GetPoolStats (signature elided
    // in this excerpt) — delegates to the pool's block vector.
    pool->m_BlockVector.GetPoolStats(pPoolStats);
// Stores the application's current frame index (used for lost-allocation
// tracking) and, when VK_EXT_memory_budget is in use, refreshes the cached
// budget once per frame.
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
    m_CurrentFrameIndex.store(frameIndex);
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
        UpdateVulkanBudget();
// Marks eligible allocations in the pool as lost as of the current frame;
// delegates to the pool's block vector. *pLostAllocationCount (optional)
// receives the number of allocations made lost.
void VmaAllocator_T::MakePoolAllocationsLost(
    size_t* pLostAllocationCount)
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
// Validates margin fill patterns of all allocations in the pool; delegates
// to the pool's block vector.
VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
    return hPool->m_BlockVector.CheckCorruption();
// Runs corruption checks over default block vectors and custom pools whose
// memory type is included in `memoryTypeBits`. Returns VK_SUCCESS if at
// least one vector was checkable and clean; VK_ERROR_FEATURE_NOT_PRESENT if
// none supported checking. NOTE(review): switch bodies are partially elided
// in this excerpt.
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
    // Default per-type block vectors.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
                // FEATURE_NOT_PRESENT from one vector doesn't override an
                // earlier success.
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    finalRes = VK_SUCCESS;
    // Custom pools (under the pools mutex).
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
            if(((1u << pool->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
                VkResult localRes = pool->m_BlockVector.CheckCorruption();
                    case VK_ERROR_FEATURE_NOT_PRESENT:
                        finalRes = VK_SUCCESS;
// Creates an allocation object that is permanently in the "lost" state
// (frame index VMA_FRAME_INDEX_LOST, no user-data string).
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
    *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
// Transactional RAII guard around an atomic counter: Increment() bumps the
// counter and remembers it; unless Commit() is called, the destructor rolls
// the increment back. NOTE(review): the destructor and Commit() bodies are
// partially elided in this excerpt — presumably the destructor decrements
// m_Atomic when non-null and Commit() clears it; verify against full source.
template<typename T>
struct AtomicTransactionalIncrement
    typedef std::atomic<T> AtomicT;
    ~AtomicTransactionalIncrement()
    // Increments *atomic and returns its previous value.
    T Increment(AtomicT* atomic)
        return m_Atomic->fetch_add(1);
        // Commit: detach so the destructor performs no rollback.
        m_Atomic = nullptr;
    AtomicT* m_Atomic = nullptr;
// Wraps vkAllocateMemory with: device-memory-count limiting (optional debug
// check), per-heap size-limit enforcement via lock-free CAS on block bytes,
// budget bookkeeping, and the user's allocate callback.
// NOTE(review): some braces/branches are elided in this excerpt.
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
    // Rolls back the count automatically unless Commit() is reached.
    AtomicTransactionalIncrement<uint32_t> deviceMemoryCountIncrement;
    const uint64_t prevDeviceMemoryCount = deviceMemoryCountIncrement.Increment(&m_DeviceMemoryCount);
#if VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
    if(prevDeviceMemoryCount >= m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount)
        return VK_ERROR_TOO_MANY_OBJECTS;
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
        // User-imposed heap size limit: reserve the bytes with a CAS loop so
        // concurrent allocations cannot oversubscribe the heap.
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
        // No heap limit: plain atomic add is sufficient.
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    // Actual Vulkan allocation.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    if(res == VK_SUCCESS)
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
        // Inform the user's callback, if registered.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData);
        deviceMemoryCountIncrement.Commit();
        // Failure: undo the byte reservation.
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
// Wraps vkFreeMemory: fires the user's free callback first, frees the
// memory, then updates block-byte accounting and the device-memory count.
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData);
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
    --m_DeviceMemoryCount;
// Binds a buffer to device memory. When a pNext chain is supplied, requires
// vkBindBufferMemory2KHR (VK_KHR_bind_memory2 / Vulkan 1.1); otherwise uses
// the plain vkBindBufferMemory. NOTE(review): parameter lines and else
// branches are elided in this excerpt.
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    if(pNext != VMA_NULL)
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
            // pNext chain requested but bind_memory2 unavailable.
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
17605 VkResult VmaAllocator_T::BindVulkanImage(
17606 VkDeviceMemory memory,
17607 VkDeviceSize memoryOffset,
17611 if(pNext != VMA_NULL)
17613 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17614 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17615 m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
17617 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
17618 bindBufferMemoryInfo.pNext = pNext;
17619 bindBufferMemoryInfo.image = image;
17620 bindBufferMemoryInfo.memory = memory;
17621 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17622 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
17627 return VK_ERROR_EXTENSION_NOT_PRESENT;
17632 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
// Maps the allocation's memory and returns a pointer adjusted by the
// allocation's offset within its block. Lost-capable allocations cannot be
// mapped. NOTE(review): some braces/returns are elided in this excerpt.
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
    if(hAllocation->CanBecomeLost())
        return VK_ERROR_MEMORY_MAP_FAILED;
    switch(hAllocation->GetType())
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        char *pBytes = VMA_NULL;
        // Block mapping is reference counted; count of 1 per this call.
        VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
        if(res == VK_SUCCESS)
            // Offset into the shared block mapping for this suballocation.
            *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
            hAllocation->BlockAllocMap();
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
        return VK_ERROR_MEMORY_MAP_FAILED;
    // NOTE(review): body of VmaAllocator_T::Unmap (signature elided in this
    // excerpt) — decrements the map reference count and unmaps accordingly.
    switch(hAllocation->GetType())
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        hAllocation->BlockAllocUnmap();
        pBlock->Unmap(this, 1);
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
// Binds a buffer to the allocation's memory at the given local offset:
// dedicated allocations bind directly; block allocations go through the
// block (which serializes binds and adds the block-relative offset).
// NOTE(review): parameter lines and breaks are elided in this excerpt.
VkResult VmaAllocator_T::BindBufferMemory(
    VkDeviceSize allocationLocalOffset,
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
        res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
// Image counterpart of BindBufferMemory: dedicated allocations bind
// directly; block allocations delegate to the owning block.
// NOTE(review): parameter lines and breaks are elided in this excerpt.
VkResult VmaAllocator_T::BindImageMemory(
    VkDeviceSize allocationLocalOffset,
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
        VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
        res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
// Flushes or invalidates one mapped range of an allocation. A no-op
// (VK_SUCCESS) when the memory type is host-coherent or the computed range
// is empty (GetFlushOrInvalidateRange returns false).
VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
    VkResult res = VK_SUCCESS;
    VkMappedMemoryRange memRange = {};
    if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Batched variant of FlushOrInvalidateAllocation: collects all non-empty
// ranges (offsets/sizes arrays optional — defaults 0 / VK_WHOLE_SIZE) into
// one vkFlush/InvalidateMappedMemoryRanges call.
VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
    uint32_t allocationCount,
    const VkDeviceSize* offsets, const VkDeviceSize* sizes,
    VMA_CACHE_OPERATION op)
    typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
    typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
    RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
        const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
        VkMappedMemoryRange newRange;
        // Only ranges that actually need a cache operation are collected
        // (host-coherent / empty ranges are skipped).
        if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
            ranges.push_back(newRange);
    VkResult res = VK_SUCCESS;
    if(!ranges.empty())
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
// Removes a dedicated allocation from the per-type list (under its mutex)
// and frees its VkDeviceMemory. NOTE(review): some lines (e.g. unmapping
// before free) are elided in this excerpt.
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
        dedicatedAllocations.Remove(allocation);
    VkDeviceMemory hMemory = allocation->GetMemory();
    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
    VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Determines which memory types can hold the staging buffer used by GPU
// defragmentation: creates a dummy buffer, queries its memoryTypeBits, and
// destroys it. Returns 0 if the dummy buffer cannot be created.
uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
    uint32_t memoryTypeBits = 0;
    // Create a temporary buffer purely to query its memory requirements.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    return memoryTypeBits;
// Computes the mask of memory types usable by default: all types, minus
// AMD DEVICE_COHERENT types unless the feature was explicitly enabled.
uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
    VMA_ASSERT(GetMemoryTypeCount() > 0);
    uint32_t memoryTypeBits = UINT32_MAX;
    if(!m_UseAmdDeviceCoherentMemory)
        // Exclude DEVICE_COHERENT_AMD memory types unless opted in.
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
            if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
                memoryTypeBits &= ~(1u << memTypeIndex);
    return memoryTypeBits;
// Computes the VkMappedMemoryRange to flush/invalidate for a sub-range of
// an allocation, aligning offset/size to nonCoherentAtomSize as required by
// the Vulkan spec. Returns false when no cache operation is needed (size 0
// or host-coherent memory type). NOTE(review): some braces/returns are
// elided in this excerpt.
bool VmaAllocator_T::GetFlushOrInvalidateRange(
    VkDeviceSize offset, VkDeviceSize size,
    VkMappedMemoryRange& outRange) const
    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        const VkDeviceSize allocationSize = allocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);
        outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
        outRange.pNext = VMA_NULL;
        outRange.memory = allocation->GetMemory();
        switch(allocation->GetType())
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            // Dedicated: range is relative to the start of the memory object.
            outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
                outRange.size = allocationSize - outRange.offset;
                VMA_ASSERT(offset + size <= allocationSize);
                // Grow size by the alignment slack, clamp to the allocation end.
                outRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
                    allocationSize - outRange.offset);
        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
            // Block suballocation: compute within the allocation first...
            outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
                size = allocationSize - offset;
                VMA_ASSERT(offset + size <= allocationSize);
            outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);
            // ...then translate into block coordinates and clamp to block size.
            const VkDeviceSize allocationOffset = allocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
            outRange.offset += allocationOffset;
            outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
#if VMA_MEMORY_BUDGET

// Fetches fresh heap usage/budget numbers from the driver via
// VK_EXT_memory_budget and sanitizes them (zero or oversized budgets are
// clamped; zero usage is backfilled from tracked block bytes).
void VmaAllocator_T::UpdateVulkanBudget()
    VMA_ASSERT(m_UseExtMemoryBudget);
    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };
    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    VmaPnextChainPushFront(&memProps, &budgetProps);
    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
        // Publish the new snapshot under the budget write lock.
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
            // Some drivers report 0 budget — fall back to 80% of heap size.
            if(m_Budget.m_VulkanBudget[heapIndex] == 0)
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
            else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
            // Some drivers report 0 usage despite live allocations.
            if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
                m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
        m_Budget.m_OperationsSinceBudgetFetch = 0;
// Debug helper: fills the allocation's memory with `pattern` (only for
// host-visible, non-lost allocations) by mapping, memset-ing, flushing, and
// unmapping. Asserts if mapping fails while the feature is enabled.
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            // Flush so the pattern is visible on non-coherent memory too.
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
18002 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
18004 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
18005 if(memoryTypeBits == UINT32_MAX)
18007 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
18008 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
18010 return memoryTypeBits;
18013 #if VMA_STATS_STRING_ENABLED
18015 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
18017 bool dedicatedAllocationsStarted =
false;
18018 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
18020 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
18021 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
18022 if(!dedicatedAllocList.IsEmpty())
18024 if(dedicatedAllocationsStarted ==
false)
18026 dedicatedAllocationsStarted =
true;
18027 json.WriteString(
"DedicatedAllocations");
18028 json.BeginObject();
18031 json.BeginString(
"Type ");
18032 json.ContinueString(memTypeIndex);
18038 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
18040 json.BeginObject(
true);
18041 alloc->PrintParameters(json);
18048 if(dedicatedAllocationsStarted)
18054 bool allocationsStarted =
false;
18055 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
18057 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
18059 if(allocationsStarted ==
false)
18061 allocationsStarted =
true;
18062 json.WriteString(
"DefaultPools");
18063 json.BeginObject();
18066 json.BeginString(
"Type ");
18067 json.ContinueString(memTypeIndex);
18070 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
18073 if(allocationsStarted)
18081 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
18082 if(!m_Pools.IsEmpty())
18084 json.WriteString(
"Pools");
18085 json.BeginObject();
18086 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
18088 json.BeginString();
18089 json.ContinueString(pool->GetId());
18092 pool->m_BlockVector.PrintDetailedMap(json);
18108 VMA_ASSERT(pCreateInfo && pAllocator);
18111 VMA_DEBUG_LOG(
"vmaCreateAllocator");
18113 return (*pAllocator)->Init(pCreateInfo);
18119 if(allocator != VK_NULL_HANDLE)
18121 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
18122 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
18123 vma_delete(&allocationCallbacks, allocator);
18129 VMA_ASSERT(allocator && pAllocatorInfo);
18130 pAllocatorInfo->
instance = allocator->m_hInstance;
18131 pAllocatorInfo->
physicalDevice = allocator->GetPhysicalDevice();
18132 pAllocatorInfo->
device = allocator->m_hDevice;
18137 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
18139 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
18140 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
18145 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
18147 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
18148 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
18153 uint32_t memoryTypeIndex,
18154 VkMemoryPropertyFlags* pFlags)
18156 VMA_ASSERT(allocator && pFlags);
18157 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
18158 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
18163 uint32_t frameIndex)
18165 VMA_ASSERT(allocator);
18166 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
18168 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18170 allocator->SetCurrentFrameIndex(frameIndex);
18177 VMA_ASSERT(allocator && pStats);
18178 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18179 allocator->CalculateStats(pStats);
18186 VMA_ASSERT(allocator && pBudget);
18187 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18188 allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
18191 #if VMA_STATS_STRING_ENABLED
18195 char** ppStatsString,
18196 VkBool32 detailedMap)
18198 VMA_ASSERT(allocator && ppStatsString);
18199 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18201 VmaStringBuilder sb(allocator);
18203 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
18204 json.BeginObject();
18207 allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());
18210 allocator->CalculateStats(&stats);
18212 json.WriteString(
"Total");
18213 VmaPrintStatInfo(json, stats.
total);
18215 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
18217 json.BeginString(
"Heap ");
18218 json.ContinueString(heapIndex);
18220 json.BeginObject();
18222 json.WriteString(
"Size");
18223 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
18225 json.WriteString(
"Flags");
18226 json.BeginArray(
true);
18227 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
18229 json.WriteString(
"DEVICE_LOCAL");
18233 json.WriteString(
"Budget");
18234 json.BeginObject();
18236 json.WriteString(
"BlockBytes");
18237 json.WriteNumber(budget[heapIndex].blockBytes);
18238 json.WriteString(
"AllocationBytes");
18239 json.WriteNumber(budget[heapIndex].allocationBytes);
18240 json.WriteString(
"Usage");
18241 json.WriteNumber(budget[heapIndex].usage);
18242 json.WriteString(
"Budget");
18243 json.WriteNumber(budget[heapIndex].budget);
18249 json.WriteString(
"Stats");
18250 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
18253 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
18255 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
18257 json.BeginString(
"Type ");
18258 json.ContinueString(typeIndex);
18261 json.BeginObject();
18263 json.WriteString(
"Flags");
18264 json.BeginArray(
true);
18265 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
18266 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
18268 json.WriteString(
"DEVICE_LOCAL");
18270 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
18272 json.WriteString(
"HOST_VISIBLE");
18274 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
18276 json.WriteString(
"HOST_COHERENT");
18278 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
18280 json.WriteString(
"HOST_CACHED");
18282 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
18284 json.WriteString(
"LAZILY_ALLOCATED");
18286 #if VMA_VULKAN_VERSION >= 1001000
18287 if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
18289 json.WriteString(
"PROTECTED");
18292 #if VK_AMD_device_coherent_memory
18293 if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
18295 json.WriteString(
"DEVICE_COHERENT");
18297 if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
18299 json.WriteString(
"DEVICE_UNCACHED");
18306 json.WriteString(
"Stats");
18307 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
18316 if(detailedMap == VK_TRUE)
18318 allocator->PrintDetailedMap(json);
18324 const size_t len = sb.GetLength();
18325 char*
const pChars = vma_new_array(allocator,
char, len + 1);
18328 memcpy(pChars, sb.GetData(), len);
18330 pChars[len] =
'\0';
18331 *ppStatsString = pChars;
18336 char* pStatsString)
18338 if(pStatsString != VMA_NULL)
18340 VMA_ASSERT(allocator);
18341 size_t len = strlen(pStatsString);
18342 vma_delete_array(allocator, pStatsString, len + 1);
18353 uint32_t memoryTypeBits,
18355 uint32_t* pMemoryTypeIndex)
18357 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18358 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18359 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18361 memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();
18368 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
18369 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
18370 uint32_t notPreferredFlags = 0;
18373 switch(pAllocationCreateInfo->
usage)
18378 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18380 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18384 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
18387 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18388 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18390 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18394 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18395 preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
18398 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18401 requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
18410 (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
18412 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
18415 *pMemoryTypeIndex = UINT32_MAX;
18416 uint32_t minCost = UINT32_MAX;
18417 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
18418 memTypeIndex < allocator->GetMemoryTypeCount();
18419 ++memTypeIndex, memTypeBit <<= 1)
18422 if((memTypeBit & memoryTypeBits) != 0)
18424 const VkMemoryPropertyFlags currFlags =
18425 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
18427 if((requiredFlags & ~currFlags) == 0)
18430 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
18431 VmaCountBitsSet(currFlags & notPreferredFlags);
18433 if(currCost < minCost)
18435 *pMemoryTypeIndex = memTypeIndex;
18440 minCost = currCost;
18445 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
18450 const VkBufferCreateInfo* pBufferCreateInfo,
18452 uint32_t* pMemoryTypeIndex)
18454 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18455 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
18456 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18457 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18459 const VkDevice hDev = allocator->m_hDevice;
18460 VkBuffer hBuffer = VK_NULL_HANDLE;
18461 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
18462 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
18463 if(res == VK_SUCCESS)
18465 VkMemoryRequirements memReq = {};
18466 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
18467 hDev, hBuffer, &memReq);
18471 memReq.memoryTypeBits,
18472 pAllocationCreateInfo,
18475 allocator->GetVulkanFunctions().vkDestroyBuffer(
18476 hDev, hBuffer, allocator->GetAllocationCallbacks());
18483 const VkImageCreateInfo* pImageCreateInfo,
18485 uint32_t* pMemoryTypeIndex)
18487 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18488 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
18489 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18490 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18492 const VkDevice hDev = allocator->m_hDevice;
18493 VkImage hImage = VK_NULL_HANDLE;
18494 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
18495 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
18496 if(res == VK_SUCCESS)
18498 VkMemoryRequirements memReq = {};
18499 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
18500 hDev, hImage, &memReq);
18504 memReq.memoryTypeBits,
18505 pAllocationCreateInfo,
18508 allocator->GetVulkanFunctions().vkDestroyImage(
18509 hDev, hImage, allocator->GetAllocationCallbacks());
18519 VMA_ASSERT(allocator && pCreateInfo && pPool);
18521 VMA_DEBUG_LOG(
"vmaCreatePool");
18523 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18525 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
18527 #if VMA_RECORDING_ENABLED
18528 if(allocator->GetRecorder() != VMA_NULL)
18530 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
18541 VMA_ASSERT(allocator);
18543 if(pool == VK_NULL_HANDLE)
18548 VMA_DEBUG_LOG(
"vmaDestroyPool");
18550 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18552 #if VMA_RECORDING_ENABLED
18553 if(allocator->GetRecorder() != VMA_NULL)
18555 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
18559 allocator->DestroyPool(pool);
18567 VMA_ASSERT(allocator && pool && pPoolStats);
18569 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18571 allocator->GetPoolStats(pool, pPoolStats);
18577 size_t* pLostAllocationCount)
18579 VMA_ASSERT(allocator && pool);
18581 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18583 #if VMA_RECORDING_ENABLED
18584 if(allocator->GetRecorder() != VMA_NULL)
18586 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
18590 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
18595 VMA_ASSERT(allocator && pool);
18597 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18599 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
18601 return allocator->CheckPoolCorruption(pool);
18607 const char** ppName)
18609 VMA_ASSERT(allocator && pool && ppName);
18611 VMA_DEBUG_LOG(
"vmaGetPoolName");
18613 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18615 *ppName = pool->GetName();
18623 VMA_ASSERT(allocator && pool);
18625 VMA_DEBUG_LOG(
"vmaSetPoolName");
18627 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18629 pool->SetName(pName);
18631 #if VMA_RECORDING_ENABLED
18632 if(allocator->GetRecorder() != VMA_NULL)
18634 allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
18641 const VkMemoryRequirements* pVkMemoryRequirements,
18646 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
18648 VMA_DEBUG_LOG(
"vmaAllocateMemory");
18650 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18652 VkResult result = allocator->AllocateMemory(
18653 *pVkMemoryRequirements,
18660 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18664 #if VMA_RECORDING_ENABLED
18665 if(allocator->GetRecorder() != VMA_NULL)
18667 allocator->GetRecorder()->RecordAllocateMemory(
18668 allocator->GetCurrentFrameIndex(),
18669 *pVkMemoryRequirements,
18675 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18677 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18685 const VkMemoryRequirements* pVkMemoryRequirements,
18687 size_t allocationCount,
18691 if(allocationCount == 0)
18696 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
18698 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
18700 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18702 VkResult result = allocator->AllocateMemory(
18703 *pVkMemoryRequirements,
18710 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18714 #if VMA_RECORDING_ENABLED
18715 if(allocator->GetRecorder() != VMA_NULL)
18717 allocator->GetRecorder()->RecordAllocateMemoryPages(
18718 allocator->GetCurrentFrameIndex(),
18719 *pVkMemoryRequirements,
18721 (uint64_t)allocationCount,
18726 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18728 for(
size_t i = 0; i < allocationCount; ++i)
18730 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
18744 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18746 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
18748 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18750 VkMemoryRequirements vkMemReq = {};
18751 bool requiresDedicatedAllocation =
false;
18752 bool prefersDedicatedAllocation =
false;
18753 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
18754 requiresDedicatedAllocation,
18755 prefersDedicatedAllocation);
18757 VkResult result = allocator->AllocateMemory(
18759 requiresDedicatedAllocation,
18760 prefersDedicatedAllocation,
18765 VMA_SUBALLOCATION_TYPE_BUFFER,
18769 #if VMA_RECORDING_ENABLED
18770 if(allocator->GetRecorder() != VMA_NULL)
18772 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
18773 allocator->GetCurrentFrameIndex(),
18775 requiresDedicatedAllocation,
18776 prefersDedicatedAllocation,
18782 if(pAllocationInfo && result == VK_SUCCESS)
18784 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18797 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18799 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
18801 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18803 VkMemoryRequirements vkMemReq = {};
18804 bool requiresDedicatedAllocation =
false;
18805 bool prefersDedicatedAllocation =
false;
18806 allocator->GetImageMemoryRequirements(image, vkMemReq,
18807 requiresDedicatedAllocation, prefersDedicatedAllocation);
18809 VkResult result = allocator->AllocateMemory(
18811 requiresDedicatedAllocation,
18812 prefersDedicatedAllocation,
18817 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
18821 #if VMA_RECORDING_ENABLED
18822 if(allocator->GetRecorder() != VMA_NULL)
18824 allocator->GetRecorder()->RecordAllocateMemoryForImage(
18825 allocator->GetCurrentFrameIndex(),
18827 requiresDedicatedAllocation,
18828 prefersDedicatedAllocation,
18834 if(pAllocationInfo && result == VK_SUCCESS)
18836 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18846 VMA_ASSERT(allocator);
18848 if(allocation == VK_NULL_HANDLE)
18853 VMA_DEBUG_LOG(
"vmaFreeMemory");
18855 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18857 #if VMA_RECORDING_ENABLED
18858 if(allocator->GetRecorder() != VMA_NULL)
18860 allocator->GetRecorder()->RecordFreeMemory(
18861 allocator->GetCurrentFrameIndex(),
18866 allocator->FreeMemory(
18873 size_t allocationCount,
18876 if(allocationCount == 0)
18881 VMA_ASSERT(allocator);
18883 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
18885 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18887 #if VMA_RECORDING_ENABLED
18888 if(allocator->GetRecorder() != VMA_NULL)
18890 allocator->GetRecorder()->RecordFreeMemoryPages(
18891 allocator->GetCurrentFrameIndex(),
18892 (uint64_t)allocationCount,
18897 allocator->FreeMemory(allocationCount, pAllocations);
18905 VMA_ASSERT(allocator && allocation && pAllocationInfo);
18907 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18909 #if VMA_RECORDING_ENABLED
18910 if(allocator->GetRecorder() != VMA_NULL)
18912 allocator->GetRecorder()->RecordGetAllocationInfo(
18913 allocator->GetCurrentFrameIndex(),
18918 allocator->GetAllocationInfo(allocation, pAllocationInfo);
18925 VMA_ASSERT(allocator && allocation);
18927 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18929 #if VMA_RECORDING_ENABLED
18930 if(allocator->GetRecorder() != VMA_NULL)
18932 allocator->GetRecorder()->RecordTouchAllocation(
18933 allocator->GetCurrentFrameIndex(),
18938 return allocator->TouchAllocation(allocation);
18946 VMA_ASSERT(allocator && allocation);
18948 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18950 allocation->SetUserData(allocator, pUserData);
18952 #if VMA_RECORDING_ENABLED
18953 if(allocator->GetRecorder() != VMA_NULL)
18955 allocator->GetRecorder()->RecordSetAllocationUserData(
18956 allocator->GetCurrentFrameIndex(),
18967 VMA_ASSERT(allocator && pAllocation);
18969 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
18971 allocator->CreateLostAllocation(pAllocation);
18973 #if VMA_RECORDING_ENABLED
18974 if(allocator->GetRecorder() != VMA_NULL)
18976 allocator->GetRecorder()->RecordCreateLostAllocation(
18977 allocator->GetCurrentFrameIndex(),
18988 VMA_ASSERT(allocator && allocation && ppData);
18990 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18992 VkResult res = allocator->Map(allocation, ppData);
18994 #if VMA_RECORDING_ENABLED
18995 if(allocator->GetRecorder() != VMA_NULL)
18997 allocator->GetRecorder()->RecordMapMemory(
18998 allocator->GetCurrentFrameIndex(),
19010 VMA_ASSERT(allocator && allocation);
19012 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19014 #if VMA_RECORDING_ENABLED
19015 if(allocator->GetRecorder() != VMA_NULL)
19017 allocator->GetRecorder()->RecordUnmapMemory(
19018 allocator->GetCurrentFrameIndex(),
19023 allocator->Unmap(allocation);
19028 VMA_ASSERT(allocator && allocation);
19030 VMA_DEBUG_LOG(
"vmaFlushAllocation");
19032 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19034 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
19036 #if VMA_RECORDING_ENABLED
19037 if(allocator->GetRecorder() != VMA_NULL)
19039 allocator->GetRecorder()->RecordFlushAllocation(
19040 allocator->GetCurrentFrameIndex(),
19041 allocation, offset, size);
19050 VMA_ASSERT(allocator && allocation);
19052 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
19054 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19056 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
19058 #if VMA_RECORDING_ENABLED
19059 if(allocator->GetRecorder() != VMA_NULL)
19061 allocator->GetRecorder()->RecordInvalidateAllocation(
19062 allocator->GetCurrentFrameIndex(),
19063 allocation, offset, size);
19072 uint32_t allocationCount,
19074 const VkDeviceSize* offsets,
19075 const VkDeviceSize* sizes)
19077 VMA_ASSERT(allocator);
19079 if(allocationCount == 0)
19084 VMA_ASSERT(allocations);
19086 VMA_DEBUG_LOG(
"vmaFlushAllocations");
19088 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19090 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);
19092 #if VMA_RECORDING_ENABLED
19093 if(allocator->GetRecorder() != VMA_NULL)
19104 uint32_t allocationCount,
19106 const VkDeviceSize* offsets,
19107 const VkDeviceSize* sizes)
19109 VMA_ASSERT(allocator);
19111 if(allocationCount == 0)
19116 VMA_ASSERT(allocations);
19118 VMA_DEBUG_LOG(
"vmaInvalidateAllocations");
19120 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19122 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);
19124 #if VMA_RECORDING_ENABLED
19125 if(allocator->GetRecorder() != VMA_NULL)
19136 VMA_ASSERT(allocator);
19138 VMA_DEBUG_LOG(
"vmaCheckCorruption");
19140 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19142 return allocator->CheckCorruption(memoryTypeBits);
19148 size_t allocationCount,
19149 VkBool32* pAllocationsChanged,
19159 if(pDefragmentationInfo != VMA_NULL)
19173 if(res == VK_NOT_READY)
19186 VMA_ASSERT(allocator && pInfo && pContext);
19197 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
19199 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
19201 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19203 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
19205 #if VMA_RECORDING_ENABLED
19206 if(allocator->GetRecorder() != VMA_NULL)
19208 allocator->GetRecorder()->RecordDefragmentationBegin(
19209 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
19220 VMA_ASSERT(allocator);
19222 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
19224 if(context != VK_NULL_HANDLE)
19226 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19228 #if VMA_RECORDING_ENABLED
19229 if(allocator->GetRecorder() != VMA_NULL)
19231 allocator->GetRecorder()->RecordDefragmentationEnd(
19232 allocator->GetCurrentFrameIndex(), context);
19236 return allocator->DefragmentationEnd(context);
19250 VMA_ASSERT(allocator);
19253 VMA_DEBUG_LOG(
"vmaBeginDefragmentationPass");
19255 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19257 if(context == VK_NULL_HANDLE)
19263 return allocator->DefragmentationPassBegin(pInfo, context);
19269 VMA_ASSERT(allocator);
19271 VMA_DEBUG_LOG(
"vmaEndDefragmentationPass");
19272 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19274 if(context == VK_NULL_HANDLE)
19277 return allocator->DefragmentationPassEnd(context);
19285 VMA_ASSERT(allocator && allocation && buffer);
19287 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
19289 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19291 return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
19297 VkDeviceSize allocationLocalOffset,
19301 VMA_ASSERT(allocator && allocation && buffer);
19303 VMA_DEBUG_LOG(
"vmaBindBufferMemory2");
19305 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19307 return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
19315 VMA_ASSERT(allocator && allocation && image);
19317 VMA_DEBUG_LOG(
"vmaBindImageMemory");
19319 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19321 return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
19327 VkDeviceSize allocationLocalOffset,
19331 VMA_ASSERT(allocator && allocation && image);
19333 VMA_DEBUG_LOG(
"vmaBindImageMemory2");
19335 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19337 return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
19342 const VkBufferCreateInfo* pBufferCreateInfo,
19348 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
19350 if(pBufferCreateInfo->size == 0)
19352 return VK_ERROR_VALIDATION_FAILED_EXT;
19354 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
19355 !allocator->m_UseKhrBufferDeviceAddress)
19357 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
19358 return VK_ERROR_VALIDATION_FAILED_EXT;
19361 VMA_DEBUG_LOG(
"vmaCreateBuffer");
19363 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19365 *pBuffer = VK_NULL_HANDLE;
19366 *pAllocation = VK_NULL_HANDLE;
19369 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
19370 allocator->m_hDevice,
19372 allocator->GetAllocationCallbacks(),
19377 VkMemoryRequirements vkMemReq = {};
19378 bool requiresDedicatedAllocation =
false;
19379 bool prefersDedicatedAllocation =
false;
19380 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
19381 requiresDedicatedAllocation, prefersDedicatedAllocation);
19384 res = allocator->AllocateMemory(
19386 requiresDedicatedAllocation,
19387 prefersDedicatedAllocation,
19389 pBufferCreateInfo->usage,
19391 *pAllocationCreateInfo,
19392 VMA_SUBALLOCATION_TYPE_BUFFER,
19396 #if VMA_RECORDING_ENABLED
19397 if(allocator->GetRecorder() != VMA_NULL)
19399 allocator->GetRecorder()->RecordCreateBuffer(
19400 allocator->GetCurrentFrameIndex(),
19401 *pBufferCreateInfo,
19402 *pAllocationCreateInfo,
19412 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
19417 #if VMA_STATS_STRING_ENABLED
19418 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
19420 if(pAllocationInfo != VMA_NULL)
19422 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19427 allocator->FreeMemory(
19430 *pAllocation = VK_NULL_HANDLE;
19431 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19432 *pBuffer = VK_NULL_HANDLE;
19435 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19436 *pBuffer = VK_NULL_HANDLE;
19447 VMA_ASSERT(allocator);
19449 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19454 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
19456 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19458 #if VMA_RECORDING_ENABLED
19459 if(allocator->GetRecorder() != VMA_NULL)
19461 allocator->GetRecorder()->RecordDestroyBuffer(
19462 allocator->GetCurrentFrameIndex(),
19467 if(buffer != VK_NULL_HANDLE)
19469 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
19472 if(allocation != VK_NULL_HANDLE)
19474 allocator->FreeMemory(
19482 const VkImageCreateInfo* pImageCreateInfo,
19488 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
19490 if(pImageCreateInfo->extent.width == 0 ||
19491 pImageCreateInfo->extent.height == 0 ||
19492 pImageCreateInfo->extent.depth == 0 ||
19493 pImageCreateInfo->mipLevels == 0 ||
19494 pImageCreateInfo->arrayLayers == 0)
19496 return VK_ERROR_VALIDATION_FAILED_EXT;
19499 VMA_DEBUG_LOG(
"vmaCreateImage");
19501 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19503 *pImage = VK_NULL_HANDLE;
19504 *pAllocation = VK_NULL_HANDLE;
19507 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
19508 allocator->m_hDevice,
19510 allocator->GetAllocationCallbacks(),
19514 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
19515 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
19516 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
19519 VkMemoryRequirements vkMemReq = {};
19520 bool requiresDedicatedAllocation =
false;
19521 bool prefersDedicatedAllocation =
false;
19522 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
19523 requiresDedicatedAllocation, prefersDedicatedAllocation);
19525 res = allocator->AllocateMemory(
19527 requiresDedicatedAllocation,
19528 prefersDedicatedAllocation,
19532 *pAllocationCreateInfo,
19537 #if VMA_RECORDING_ENABLED
19538 if(allocator->GetRecorder() != VMA_NULL)
19540 allocator->GetRecorder()->RecordCreateImage(
19541 allocator->GetCurrentFrameIndex(),
19543 *pAllocationCreateInfo,
19553 res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
19558 #if VMA_STATS_STRING_ENABLED
19559 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
19561 if(pAllocationInfo != VMA_NULL)
19563 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19568 allocator->FreeMemory(
19571 *pAllocation = VK_NULL_HANDLE;
19572 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19573 *pImage = VK_NULL_HANDLE;
19576 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19577 *pImage = VK_NULL_HANDLE;
19588 VMA_ASSERT(allocator);
19590 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19595 VMA_DEBUG_LOG(
"vmaDestroyImage");
19597 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19599 #if VMA_RECORDING_ENABLED
19600 if(allocator->GetRecorder() != VMA_NULL)
19602 allocator->GetRecorder()->RecordDestroyImage(
19603 allocator->GetCurrentFrameIndex(),
19608 if(image != VK_NULL_HANDLE)
19610 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
19612 if(allocation != VK_NULL_HANDLE)
19614 allocator->FreeMemory(
Definition: vk_mem_alloc.h:2879
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2905
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2911
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2897
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2918
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2892
float priority
A floating-point value between 0 and 1, indicating the priority of the allocation relative to other m...
Definition: vk_mem_alloc.h:2925
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2887
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2881
Represents single memory allocation.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:3246
VkDeviceSize offset
Offset in VkDeviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:3270
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:3290
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:3251
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:3281
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:3295
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:3260
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:2413
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:2418
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2444
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:2469
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:2415
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null.
Definition: vk_mem_alloc.h:2475
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:2427
VkInstance instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:2487
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:2424
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:2482
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:2421
uint32_t vulkanApiVersion
Optional. The highest version of Vulkan that the application is designed to use.
Definition: vk_mem_alloc.h:2496
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:2430
Represents the main object of this library, once initialized.
Information about existing VmaAllocator object.
Definition: vk_mem_alloc.h:2511
VkDevice device
Handle to Vulkan device object.
Definition: vk_mem_alloc.h:2526
VkInstance instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:2516
VkPhysicalDevice physicalDevice
Handle to Vulkan physical device object.
Definition: vk_mem_alloc.h:2521
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
Definition: vk_mem_alloc.h:2617
VkDeviceSize blockBytes
Sum size of all VkDeviceMemory blocks allocated from particular heap, in bytes.
Definition: vk_mem_alloc.h:2620
VkDeviceSize allocationBytes
Sum size of all allocations created in particular heap, in bytes.
Definition: vk_mem_alloc.h:2631
VkDeviceSize usage
Estimated current memory usage of the program, in bytes.
Definition: vk_mem_alloc.h:2641
VkDeviceSize budget
Estimated amount of memory available to the program, in bytes.
Definition: vk_mem_alloc.h:2652
Opaque object that represents a started defragmentation process.
Parameters for defragmentation.
Definition: vk_mem_alloc.h:3645
const VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:3685
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:3651
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:3705
VkDeviceSize maxGpuBytesToMove
Maximum total number of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:3700
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:3648
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:3666
uint32_t poolCount
Number of pools in pPools array.
Definition: vk_mem_alloc.h:3669
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:3714
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:3695
const VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:3660
VkDeviceSize maxCpuBytesToMove
Maximum total number of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:3690
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:3736
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:3746
VkDeviceSize maxBytesToMove
Maximum total number of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:3741
Parameters for incremental defragmentation steps.
Definition: vk_mem_alloc.h:3727
uint32_t moveCount
Definition: vk_mem_alloc.h:3728
VmaDefragmentationPassMoveInfo * pMoves
Definition: vk_mem_alloc.h:3729
Definition: vk_mem_alloc.h:3717
VkDeviceMemory memory
Definition: vk_mem_alloc.h:3719
VkDeviceSize offset
Definition: vk_mem_alloc.h:3720
VmaAllocation allocation
Definition: vk_mem_alloc.h:3718
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:3750
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:3758
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:3752
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:3754
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:3756
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:2222
void * pUserData
Optional, can be null.
Definition: vk_mem_alloc.h:2228
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:2224
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:2226
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:3047
float priority
A floating-point value between 0 and 1, indicating the priority of the allocations in this pool relat...
Definition: vk_mem_alloc.h:3095
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:3050
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:3053
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:3089
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:3062
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:3067
VkDeviceSize minAllocationAlignment
Additional minimum alignment to be used for all allocations created from this pool....
Definition: vk_mem_alloc.h:3102
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:3075
void * pMemoryAllocateNext
Additional pNext chain to be attached to VkMemoryAllocateInfo used for every allocation made by this ...
Definition: vk_mem_alloc.h:3112
Represents custom memory pool.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:3117
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:3120
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:3139
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:3136
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:3126
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:3123
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:3129
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:2398
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:2408
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:2400
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:2578
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:2589
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:2589
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:2588
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:2590
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:2582
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:2590
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:2586
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:2580
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:2589
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:2584
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:2590
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2595
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:2597
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:2596
VmaStatInfo total
Definition: vk_mem_alloc.h:2598
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:2352
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:2362
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:2367
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:2355
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:2359
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:2364
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:2356
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:2363
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:2360
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:2354
PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:2353
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:2366
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:2368
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:2361
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:2357
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:2358
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:2369
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:2365
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size, void *pUserData)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:2208
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
struct VmaAllocatorInfo VmaAllocatorInfo
Information about existing VmaAllocator object.
VkResult vmaEndDefragmentationPass(VmaAllocator allocator, VmaDefragmentationContext context)
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:2029
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
struct VmaStats VmaStats
General statistics from current state of Allocator.
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:3043
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:2384
@ VMA_RECORD_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2392
@ VMA_RECORD_FLUSH_AFTER_CALL_BIT
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:2390
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:2232
@ VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT
Definition: vk_mem_alloc.h:2307
@ VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:2237
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT
Definition: vk_mem_alloc.h:2289
@ VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT
Definition: vk_mem_alloc.h:2325
@ VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT
Definition: vk_mem_alloc.h:2277
@ VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:2262
@ VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2344
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT
Definition: vk_mem_alloc.h:2342
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2876
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
void vmaFreeMemory(VmaAllocator allocator, const VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:3635
@ VMA_DEFRAGMENTATION_FLAG_INCREMENTAL
Definition: vk_mem_alloc.h:3636
@ VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:3637
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
struct VmaDefragmentationPassInfo VmaDefragmentationPassInfo
Parameters for incremental defragmentation steps.
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size, void *pUserData)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:2201
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, const VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:3639
VkResult vmaBindBufferMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkBuffer buffer, const void *pNext)
Binds buffer to allocation with additional parameters.
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2987
@ VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:3022
@ VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:3041
@ VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:3033
@ VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:3005
@ VMA_POOL_CREATE_ALGORITHM_MASK
Definition: vk_mem_alloc.h:3037
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkResult vmaDefragment(VmaAllocator allocator, const VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
struct VmaBudget VmaBudget
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
VmaMemoryUsage
Definition: vk_mem_alloc.h:2700
@ VMA_MEMORY_USAGE_MAX_ENUM
Definition: vk_mem_alloc.h:2763
@ VMA_MEMORY_USAGE_CPU_ONLY
Definition: vk_mem_alloc.h:2731
@ VMA_MEMORY_USAGE_CPU_COPY
Definition: vk_mem_alloc.h:2753
@ VMA_MEMORY_USAGE_GPU_TO_CPU
Definition: vk_mem_alloc.h:2747
@ VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED
Definition: vk_mem_alloc.h:2761
@ VMA_MEMORY_USAGE_CPU_TO_GPU
Definition: vk_mem_alloc.h:2738
@ VMA_MEMORY_USAGE_GPU_ONLY
Definition: vk_mem_alloc.h:2721
@ VMA_MEMORY_USAGE_UNKNOWN
Definition: vk_mem_alloc.h:2704
VkResult vmaBindImageMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkImage image, const void *pNext)
Binds image to allocation with additional parameters.
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VkResult vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
VkResult vmaInvalidateAllocations(VmaAllocator allocator, uint32_t allocationCount, const VmaAllocation *allocations, const VkDeviceSize *offsets, const VkDeviceSize *sizes)
Invalidates memory of given set of allocations.
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
VkResult vmaBeginDefragmentationPass(VmaAllocator allocator, VmaDefragmentationContext context, VmaDefragmentationPassInfo *pInfo)
VkResult vmaFlushAllocations(VmaAllocator allocator, uint32_t allocationCount, const VmaAllocation *allocations, const VkDeviceSize *offsets, const VkDeviceSize *sizes)
Flushes memory of given set of allocations.
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:2346
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
struct VmaDefragmentationPassMoveInfo VmaDefragmentationPassMoveInfo
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2767
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT
Definition: vk_mem_alloc.h:2862
@ VMA_ALLOCATION_CREATE_MAPPED_BIT
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2798
@ VMA_ALLOCATION_CREATE_DONT_BIND_BIT
Definition: vk_mem_alloc.h:2835
@ VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT
Definition: vk_mem_alloc.h:2855
@ VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2774
@ VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT
Definition: vk_mem_alloc.h:2829
@ VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT
Definition: vk_mem_alloc.h:2811
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT
Definition: vk_mem_alloc.h:2865
@ VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT
Definition: vk_mem_alloc.h:2818
@ VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT
Definition: vk_mem_alloc.h:2844
@ VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2785
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT
Definition: vk_mem_alloc.h:2859
@ VMA_ALLOCATION_CREATE_STRATEGY_MASK
Definition: vk_mem_alloc.h:2869
@ VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT
Definition: vk_mem_alloc.h:2824
@ VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT
Definition: vk_mem_alloc.h:2839
@ VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT
Definition: vk_mem_alloc.h:2848
@ VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2874
void vmaSetPoolName(VmaAllocator allocator, VmaPool pool, const char *pName)
Sets name of a custom pool.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
void vmaGetBudget(VmaAllocator allocator, VmaBudget *pBudget)
Retrieves information about current memory budget for all memory heaps.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
void vmaGetPoolName(VmaAllocator allocator, VmaPool pool, const char **ppName)
Retrieves name of a custom pool.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:2394
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
void vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo *pAllocatorInfo)
Returns information about existing VmaAllocator object - handle to Vulkan device etc.