23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
2028 #ifndef VMA_RECORDING_ENABLED
2029 #define VMA_RECORDING_ENABLED 0
2032 #if !defined(NOMINMAX) && defined(VMA_IMPLEMENTATION)
2036 #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
2037 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
2038 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
2039 extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
2040 extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
2041 extern PFN_vkAllocateMemory vkAllocateMemory;
2042 extern PFN_vkFreeMemory vkFreeMemory;
2043 extern PFN_vkMapMemory vkMapMemory;
2044 extern PFN_vkUnmapMemory vkUnmapMemory;
2045 extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
2046 extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
2047 extern PFN_vkBindBufferMemory vkBindBufferMemory;
2048 extern PFN_vkBindImageMemory vkBindImageMemory;
2049 extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
2050 extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
2051 extern PFN_vkCreateBuffer vkCreateBuffer;
2052 extern PFN_vkDestroyBuffer vkDestroyBuffer;
2053 extern PFN_vkCreateImage vkCreateImage;
2054 extern PFN_vkDestroyImage vkDestroyImage;
2055 extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
2056 #if VMA_VULKAN_VERSION >= 1001000
2057 extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
2058 extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
2059 extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
2060 extern PFN_vkBindImageMemory2 vkBindImageMemory2;
2061 extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
2066 #include <vulkan/vulkan.h>
2072 #if !defined(VMA_VULKAN_VERSION)
2073 #if defined(VK_VERSION_1_2)
2074 #define VMA_VULKAN_VERSION 1002000
2075 #elif defined(VK_VERSION_1_1)
2076 #define VMA_VULKAN_VERSION 1001000
2078 #define VMA_VULKAN_VERSION 1000000
2082 #if !defined(VMA_DEDICATED_ALLOCATION)
2083 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
2084 #define VMA_DEDICATED_ALLOCATION 1
2086 #define VMA_DEDICATED_ALLOCATION 0
2090 #if !defined(VMA_BIND_MEMORY2)
2091 #if VK_KHR_bind_memory2
2092 #define VMA_BIND_MEMORY2 1
2094 #define VMA_BIND_MEMORY2 0
2098 #if !defined(VMA_MEMORY_BUDGET)
2099 #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
2100 #define VMA_MEMORY_BUDGET 1
2102 #define VMA_MEMORY_BUDGET 0
2107 #if !defined(VMA_BUFFER_DEVICE_ADDRESS)
2108 #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
2109 #define VMA_BUFFER_DEVICE_ADDRESS 1
2111 #define VMA_BUFFER_DEVICE_ADDRESS 0
2116 #if !defined(VMA_MEMORY_PRIORITY)
2117 #if VK_EXT_memory_priority
2118 #define VMA_MEMORY_PRIORITY 1
2120 #define VMA_MEMORY_PRIORITY 0
2125 #if !defined(VMA_EXTERNAL_MEMORY)
2126 #if VK_KHR_external_memory
2127 #define VMA_EXTERNAL_MEMORY 1
2129 #define VMA_EXTERNAL_MEMORY 0
2138 #ifndef VMA_CALL_PRE
2139 #define VMA_CALL_PRE
2141 #ifndef VMA_CALL_POST
2142 #define VMA_CALL_POST
2156 #ifndef VMA_LEN_IF_NOT_NULL
2157 #define VMA_LEN_IF_NOT_NULL(len)
2162 #ifndef VMA_NULLABLE
2164 #define VMA_NULLABLE _Nullable
2166 #define VMA_NULLABLE
2172 #ifndef VMA_NOT_NULL
2174 #define VMA_NOT_NULL _Nonnull
2176 #define VMA_NOT_NULL
2182 #ifndef VMA_NOT_NULL_NON_DISPATCHABLE
2183 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2184 #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
2186 #define VMA_NOT_NULL_NON_DISPATCHABLE
2190 #ifndef VMA_NULLABLE_NON_DISPATCHABLE
2191 #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
2192 #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
2194 #define VMA_NULLABLE_NON_DISPATCHABLE
2212 uint32_t memoryType,
2213 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2215 void* VMA_NULLABLE pUserData);
2219 uint32_t memoryType,
2220 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
2222 void* VMA_NULLABLE pUserData);
2379 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
2380 PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
2381 PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
2383 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
2384 PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
2385 PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
2387 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
2388 PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
2478 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(
"VkPhysicalDeviceMemoryProperties::memoryHeapCount")
pHeapSizeLimit;
2506 #if VMA_EXTERNAL_MEMORY
2563 const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);
2571 const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);
2581 uint32_t memoryTypeIndex,
2582 VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);
2594 uint32_t frameIndex);
2690 #ifndef VMA_STATS_STRING_ENABLED
2691 #define VMA_STATS_STRING_ENABLED 1
2694 #if VMA_STATS_STRING_ENABLED
2701 char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
2702 VkBool32 detailedMap);
2706 char* VMA_NULLABLE pStatsString);
2967 uint32_t memoryTypeBits,
2969 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
2985 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
2987 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
3003 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
3005 uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
3172 VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);
3200 size_t* VMA_NULLABLE pLostAllocationCount);
3227 const char* VMA_NULLABLE * VMA_NOT_NULL ppName);
3237 const char* VMA_NULLABLE pName);
3331 const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
3357 const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
3359 size_t allocationCount,
3360 VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3361 VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);
3371 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3379 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3404 size_t allocationCount,
3405 const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);
3462 void* VMA_NULLABLE pUserData);
3519 void* VMA_NULLABLE * VMA_NOT_NULL ppData);
3557 VkDeviceSize offset,
3584 VkDeviceSize offset,
3603 uint32_t allocationCount,
3604 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3605 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3606 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3624 uint32_t allocationCount,
3625 const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
3626 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
3627 const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
3740 VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE
memory;
3878 const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
3879 size_t allocationCount,
3880 VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
3899 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);
3914 VkDeviceSize allocationLocalOffset,
3915 VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
3916 const void* VMA_NULLABLE pNext);
3933 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);
3948 VkDeviceSize allocationLocalOffset,
3949 VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
3950 const void* VMA_NULLABLE pNext);
3984 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
3986 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
4003 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
4009 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
4011 VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
4028 VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
4038 #if defined(__cplusplus) && defined(__INTELLISENSE__)
4039 #define VMA_IMPLEMENTATION
4042 #ifdef VMA_IMPLEMENTATION
4043 #undef VMA_IMPLEMENTATION
4050 #if VMA_RECORDING_ENABLED
4053 #include <windows.h>
4073 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
4074 #define VMA_STATIC_VULKAN_FUNCTIONS 1
4083 #if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
4084 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
4085 #if defined(VK_NO_PROTOTYPES)
4086 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
4087 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
4100 #if VMA_USE_STL_CONTAINERS
4101 #define VMA_USE_STL_VECTOR 1
4102 #define VMA_USE_STL_UNORDERED_MAP 1
4103 #define VMA_USE_STL_LIST 1
4106 #ifndef VMA_USE_STL_SHARED_MUTEX
4108 #if __cplusplus >= 201703L
4109 #define VMA_USE_STL_SHARED_MUTEX 1
4113 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
4114 #define VMA_USE_STL_SHARED_MUTEX 1
4116 #define VMA_USE_STL_SHARED_MUTEX 0
4124 #if VMA_USE_STL_VECTOR
4128 #if VMA_USE_STL_UNORDERED_MAP
4129 #include <unordered_map>
4132 #if VMA_USE_STL_LIST
4141 #include <algorithm>
4146 #define VMA_NULL nullptr
4149 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
4151 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4154 if(alignment <
sizeof(
void*))
4156 alignment =
sizeof(
void*);
4159 return memalign(alignment, size);
4161 #elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
4164 #if defined(__APPLE__)
4165 #include <AvailabilityMacros.h>
4168 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4170 #if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0))
4171 #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0
4178 if (__builtin_available(macOS 10.15, iOS 13, *))
4179 return aligned_alloc(alignment, size);
4183 if(alignment <
sizeof(
void*))
4185 alignment =
sizeof(
void*);
4189 if(posix_memalign(&pointer, alignment, size) == 0)
4193 #elif defined(_WIN32)
4194 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4196 return _aligned_malloc(size, alignment);
4199 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
4201 return aligned_alloc(alignment, size);
4206 static void vma_aligned_free(
void* ptr)
4211 static void vma_aligned_free(
void* VMA_NULLABLE ptr)
4225 #define VMA_ASSERT(expr)
4227 #define VMA_ASSERT(expr) assert(expr)
4233 #ifndef VMA_HEAVY_ASSERT
4235 #define VMA_HEAVY_ASSERT(expr)
4237 #define VMA_HEAVY_ASSERT(expr)
4241 #ifndef VMA_ALIGN_OF
4242 #define VMA_ALIGN_OF(type) (__alignof(type))
4245 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
4246 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
4249 #ifndef VMA_SYSTEM_ALIGNED_FREE
4251 #if defined(VMA_SYSTEM_FREE)
4252 #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr)
4254 #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr)
4259 #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
4263 #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
4267 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
4271 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
4274 #ifndef VMA_DEBUG_LOG
4275 #define VMA_DEBUG_LOG(format, ...)
4285 #if VMA_STATS_STRING_ENABLED
4286 static inline void VmaUint32ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint32_t num)
4288 snprintf(outStr, strLen,
"%u",
static_cast<unsigned int>(num));
4290 static inline void VmaUint64ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint64_t num)
4292 snprintf(outStr, strLen,
"%llu",
static_cast<unsigned long long>(num));
4294 static inline void VmaPtrToStr(
char* VMA_NOT_NULL outStr,
size_t strLen,
const void* ptr)
4296 snprintf(outStr, strLen,
"%p", ptr);
4304 void Lock() { m_Mutex.lock(); }
4305 void Unlock() { m_Mutex.unlock(); }
4306 bool TryLock() {
return m_Mutex.try_lock(); }
4310 #define VMA_MUTEX VmaMutex
4314 #ifndef VMA_RW_MUTEX
4315 #if VMA_USE_STL_SHARED_MUTEX
4317 #include <shared_mutex>
4321 void LockRead() { m_Mutex.lock_shared(); }
4322 void UnlockRead() { m_Mutex.unlock_shared(); }
4323 bool TryLockRead() {
return m_Mutex.try_lock_shared(); }
4324 void LockWrite() { m_Mutex.lock(); }
4325 void UnlockWrite() { m_Mutex.unlock(); }
4326 bool TryLockWrite() {
return m_Mutex.try_lock(); }
4328 std::shared_mutex m_Mutex;
4330 #define VMA_RW_MUTEX VmaRWMutex
4331 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
4337 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
4338 void LockRead() { AcquireSRWLockShared(&m_Lock); }
4339 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
4340 bool TryLockRead() {
return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
4341 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
4342 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
4343 bool TryLockWrite() {
return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
4347 #define VMA_RW_MUTEX VmaRWMutex
4353 void LockRead() { m_Mutex.Lock(); }
4354 void UnlockRead() { m_Mutex.Unlock(); }
4355 bool TryLockRead() {
return m_Mutex.TryLock(); }
4356 void LockWrite() { m_Mutex.Lock(); }
4357 void UnlockWrite() { m_Mutex.Unlock(); }
4358 bool TryLockWrite() {
return m_Mutex.TryLock(); }
4362 #define VMA_RW_MUTEX VmaRWMutex
4369 #ifndef VMA_ATOMIC_UINT32
4371 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
4374 #ifndef VMA_ATOMIC_UINT64
4376 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
4379 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
4384 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
4387 #ifndef VMA_MIN_ALIGNMENT
4392 #ifdef VMA_DEBUG_ALIGNMENT
4393 #define VMA_MIN_ALIGNMENT VMA_DEBUG_ALIGNMENT
4395 #define VMA_MIN_ALIGNMENT (1)
4399 #ifndef VMA_DEBUG_MARGIN
4404 #define VMA_DEBUG_MARGIN (0)
4407 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
4412 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
4415 #ifndef VMA_DEBUG_DETECT_CORRUPTION
4421 #define VMA_DEBUG_DETECT_CORRUPTION (0)
4424 #ifndef VMA_DEBUG_GLOBAL_MUTEX
4429 #define VMA_DEBUG_GLOBAL_MUTEX (0)
4432 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
4437 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
4440 #ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
4445 #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0)
4448 #ifndef VMA_SMALL_HEAP_MAX_SIZE
4450 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
4453 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
4455 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
4458 #ifndef VMA_CLASS_NO_COPY
4459 #define VMA_CLASS_NO_COPY(className) \
4461 className(const className&) = delete; \
4462 className& operator=(const className&) = delete;
4465 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
4468 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
4470 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
4471 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
4479 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
4480 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
4481 static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
4483 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
4485 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
4486 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    // Kernighan's method: each iteration clears the lowest set bit,
    // so the loop runs once per set bit.
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1;
        ++count;
    }
    return count;
}
// Returns true when x has at most one bit set.
// Note: this also returns true for x == 0; callers use it to validate
// alignments, which are expected to be non-zero powers of two.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T lowerBits = x - 1;
    return (x & lowerBits) == 0;
}
// Rounds val up to the nearest multiple of alignment.
// alignment must be a power of two (asserted), which makes the
// mask-based rounding below valid.
template <typename T>
static inline T VmaAlignUp(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    const T mask = alignment - 1;
    return (val + mask) & ~mask;
}
// Rounds val down to the nearest multiple of alignment.
// alignment must be a power of two (asserted).
template <typename T>
static inline T VmaAlignDown(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    const T mask = alignment - 1;
    return val & ~mask;
}
// Integer division of x by y with rounding to nearest
// (exact halves round up for non-negative operands).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
4535 static inline uint32_t VmaNextPow2(uint32_t v)
4546 static inline uint64_t VmaNextPow2(uint64_t v)
4560 static inline uint32_t VmaPrevPow2(uint32_t v)
4570 static inline uint64_t VmaPrevPow2(uint64_t v)
4582 static inline bool VmaStrIsEmpty(
const char* pStr)
4584 return pStr == VMA_NULL || *pStr ==
'\0';
4587 #if VMA_STATS_STRING_ENABLED
4589 static const char* VmaAlgorithmToStr(uint32_t algorithm)
4609 template<
typename Iterator,
typename Compare>
4610 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
4612 Iterator centerValue = end; --centerValue;
4613 Iterator insertIndex = beg;
4614 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
4616 if(cmp(*memTypeIndex, *centerValue))
4618 if(insertIndex != memTypeIndex)
4620 VMA_SWAP(*memTypeIndex, *insertIndex);
4625 if(insertIndex != centerValue)
4627 VMA_SWAP(*insertIndex, *centerValue);
4632 template<
typename Iterator,
typename Compare>
4633 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
4637 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
4638 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
4639 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
4643 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
4654 static inline bool VmaBlocksOnSamePage(
4655 VkDeviceSize resourceAOffset,
4656 VkDeviceSize resourceASize,
4657 VkDeviceSize resourceBOffset,
4658 VkDeviceSize pageSize)
4660 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
4661 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
4662 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
4663 VkDeviceSize resourceBStart = resourceBOffset;
4664 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
4665 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation within a memory block.
// The numeric ordering matters: values are compared with `>` elsewhere
// (see VmaIsBufferImageGranularityConflict) to canonicalize pairs.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,            // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,         // allocation with unknown usage
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,   // image with unknown tiling
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // forces 32-bit underlying range
};
4685 static inline bool VmaIsBufferImageGranularityConflict(
4686 VmaSuballocationType suballocType1,
4687 VmaSuballocationType suballocType2)
4689 if(suballocType1 > suballocType2)
4691 VMA_SWAP(suballocType1, suballocType2);
4694 switch(suballocType1)
4696 case VMA_SUBALLOCATION_TYPE_FREE:
4698 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
4700 case VMA_SUBALLOCATION_TYPE_BUFFER:
4702 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4703 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4704 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
4706 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4707 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
4708 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4709 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
4711 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4712 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
4720 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
4722 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4723 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
4724 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4725 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
4727 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
4734 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
4736 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4737 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
4738 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4739 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
4741 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
4754 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
4756 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
4757 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4758 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4759 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
4765 VMA_CLASS_NO_COPY(VmaMutexLock)
4767 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
4768 m_pMutex(useMutex ? &mutex : VMA_NULL)
4769 {
if(m_pMutex) { m_pMutex->Lock(); } }
4771 {
if(m_pMutex) { m_pMutex->Unlock(); } }
4773 VMA_MUTEX* m_pMutex;
4777 struct VmaMutexLockRead
4779 VMA_CLASS_NO_COPY(VmaMutexLockRead)
4781 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
4782 m_pMutex(useMutex ? &mutex : VMA_NULL)
4783 {
if(m_pMutex) { m_pMutex->LockRead(); } }
4784 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
4786 VMA_RW_MUTEX* m_pMutex;
4790 struct VmaMutexLockWrite
4792 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
4794 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
4795 m_pMutex(useMutex ? &mutex : VMA_NULL)
4796 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
4797 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
4799 VMA_RW_MUTEX* m_pMutex;
4802 #if VMA_DEBUG_GLOBAL_MUTEX
4803 static VMA_MUTEX gDebugGlobalMutex;
4804 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
4806 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
4810 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
4821 template <
typename CmpLess,
typename IterT,
typename KeyT>
4822 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key,
const CmpLess& cmp)
4824 size_t down = 0, up = (end - beg);
4827 const size_t mid = down + (up - down) / 2;
4828 if(cmp(*(beg+mid), key))
4840 template<
typename CmpLess,
typename IterT,
typename KeyT>
4841 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
4843 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4844 beg, end, value, cmp);
4846 (!cmp(*it, value) && !cmp(value, *it)))
4858 template<
typename T>
4859 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
4861 for(uint32_t i = 0; i < count; ++i)
4863 const T iPtr = arr[i];
4864 if(iPtr == VMA_NULL)
4868 for(uint32_t j = i + 1; j < count; ++j)
// Inserts newStruct at the head of mainStruct's pNext extension chain.
// Both types must expose a pNext member; any structs previously chained to
// mainStruct remain reachable through newStruct.
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    // Order matters: capture the old head before overwriting it.
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
4889 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
4891 void* result = VMA_NULL;
4892 if((pAllocationCallbacks != VMA_NULL) &&
4893 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
4895 result = (*pAllocationCallbacks->pfnAllocation)(
4896 pAllocationCallbacks->pUserData,
4899 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
4903 result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
4905 VMA_ASSERT(result != VMA_NULL &&
"CPU memory allocation failed.");
4909 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
4911 if((pAllocationCallbacks != VMA_NULL) &&
4912 (pAllocationCallbacks->pfnFree != VMA_NULL))
4914 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
4918 VMA_SYSTEM_ALIGNED_FREE(ptr);
4922 template<
typename T>
4923 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
4925 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
4928 template<
typename T>
4929 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
4931 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
4934 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
4936 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
4938 template<
typename T>
4939 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
4942 VmaFree(pAllocationCallbacks, ptr);
4945 template<
typename T>
4946 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
4950 for(
size_t i = count; i--; )
4954 VmaFree(pAllocationCallbacks, ptr);
4958 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr)
4960 if(srcStr != VMA_NULL)
4962 const size_t len = strlen(srcStr);
4963 char*
const result = vma_new_array(allocs,
char, len + 1);
4964 memcpy(result, srcStr, len + 1);
4973 static void VmaFreeString(
const VkAllocationCallbacks* allocs,
char* str)
4977 const size_t len = strlen(str);
4978 vma_delete_array(allocs, str, len + 1);
4983 template<
typename T>
4984 class VmaStlAllocator
4987 const VkAllocationCallbacks*
const m_pCallbacks;
4988 typedef T value_type;
4990 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
4991 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
4993 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
4994 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
4996 template<
typename U>
4997 bool operator==(
const VmaStlAllocator<U>& rhs)
const
4999 return m_pCallbacks == rhs.m_pCallbacks;
5001 template<
typename U>
5002 bool operator!=(
const VmaStlAllocator<U>& rhs)
const
5004 return m_pCallbacks != rhs.m_pCallbacks;
5007 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
5008 VmaStlAllocator(
const VmaStlAllocator&) =
default;
5011 #if VMA_USE_STL_VECTOR
5013 #define VmaVector std::vector
// Index-based insert adapter so the rest of the code can treat std::vector
// and the custom VmaVector uniformly.
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    const typename std::vector<T, allocatorT>::iterator pos = vec.begin() + index;
    vec.insert(pos, item);
}
// Index-based erase adapter, mirroring VmaVectorInsert, so std::vector and
// the custom VmaVector share a common removal interface.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    const typename std::vector<T, allocatorT>::iterator pos = vec.begin() + index;
    vec.erase(pos);
}
5032 template<
typename T,
typename AllocatorT>
5036 typedef T value_type;
5038 VmaVector(
const AllocatorT& allocator) :
5039 m_Allocator(allocator),
5046 VmaVector(
size_t count,
const AllocatorT& allocator) :
5047 m_Allocator(allocator),
5048 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
5056 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator)
5057 : VmaVector(count, allocator) {}
5059 VmaVector(
const VmaVector<T, AllocatorT>& src) :
5060 m_Allocator(src.m_Allocator),
5061 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
5062 m_Count(src.m_Count),
5063 m_Capacity(src.m_Count)
5067 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
5073 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5076 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
5080 resize(rhs.m_Count);
5083 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
5089 bool empty()
const {
return m_Count == 0; }
5090 size_t size()
const {
return m_Count; }
5091 T* data() {
return m_pArray; }
5092 const T* data()
const {
return m_pArray; }
5094 T& operator[](
size_t index)
5096 VMA_HEAVY_ASSERT(index < m_Count);
5097 return m_pArray[index];
5099 const T& operator[](
size_t index)
const
5101 VMA_HEAVY_ASSERT(index < m_Count);
5102 return m_pArray[index];
5107 VMA_HEAVY_ASSERT(m_Count > 0);
5110 const T& front()
const
5112 VMA_HEAVY_ASSERT(m_Count > 0);
5117 VMA_HEAVY_ASSERT(m_Count > 0);
5118 return m_pArray[m_Count - 1];
5120 const T& back()
const
5122 VMA_HEAVY_ASSERT(m_Count > 0);
5123 return m_pArray[m_Count - 1];
5126 void reserve(
size_t newCapacity,
bool freeMemory =
false)
5128 newCapacity = VMA_MAX(newCapacity, m_Count);
5130 if((newCapacity < m_Capacity) && !freeMemory)
5132 newCapacity = m_Capacity;
5135 if(newCapacity != m_Capacity)
5137 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
5140 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5142 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5143 m_Capacity = newCapacity;
5144 m_pArray = newArray;
5148 void resize(
size_t newCount)
5150 size_t newCapacity = m_Capacity;
5151 if(newCount > m_Capacity)
5153 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
5156 if(newCapacity != m_Capacity)
5158 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
5159 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
5160 if(elementsToCopy != 0)
5162 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
5164 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5165 m_Capacity = newCapacity;
5166 m_pArray = newArray;
5177 void shrink_to_fit()
5179 if(m_Capacity > m_Count)
5181 T* newArray = VMA_NULL;
5184 newArray = VmaAllocateArray<T>(m_Allocator.m_pCallbacks, m_Count);
5185 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
5187 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
5188 m_Capacity = m_Count;
5189 m_pArray = newArray;
5193 void insert(
size_t index,
const T& src)
5195 VMA_HEAVY_ASSERT(index <= m_Count);
5196 const size_t oldCount = size();
5197 resize(oldCount + 1);
5198 if(index < oldCount)
5200 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
5202 m_pArray[index] = src;
5205 void remove(
size_t index)
5207 VMA_HEAVY_ASSERT(index < m_Count);
5208 const size_t oldCount = size();
5209 if(index < oldCount - 1)
5211 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
5213 resize(oldCount - 1);
5216 void push_back(
const T& src)
5218 const size_t newIndex = size();
5219 resize(newIndex + 1);
5220 m_pArray[newIndex] = src;
5225 VMA_HEAVY_ASSERT(m_Count > 0);
5229 void push_front(
const T& src)
5236 VMA_HEAVY_ASSERT(m_Count > 0);
5240 typedef T* iterator;
5241 typedef const T* const_iterator;
5243 iterator begin() {
return m_pArray; }
5244 iterator end() {
return m_pArray + m_Count; }
5245 const_iterator cbegin()
const {
return m_pArray; }
5246 const_iterator cend()
const {
return m_pArray + m_Count; }
5247 const_iterator begin()
const {
return cbegin(); }
5248 const_iterator end()
const {
return cend(); }
5251 AllocatorT m_Allocator;
5257 template<
typename T,
typename allocatorT>
5258 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
5260 vec.insert(index, item);
5263 template<
typename T,
typename allocatorT>
5264 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
5271 template<
typename CmpLess,
typename VectorT>
5272 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
5274 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
5276 vector.data() + vector.size(),
5278 CmpLess()) - vector.data();
5279 VmaVectorInsert(vector, indexToInsert, value);
5280 return indexToInsert;
// Removes one element equal to `value` (equality = neither compares less
// than the other under CmpLess) from a sorted vector.
// Returns true if an element was found and removed, false otherwise.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    // Binary search for the first element not less than value.
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalence check: !(a<b) && !(b<a) means a == b under CmpLess.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
// Small-buffer-optimized vector: up to N elements live in an inline
// static array; beyond N, storage switches to a heap-backed VmaVector.
// The switch point is tested everywhere as `m_Count > N`.
// NOTE(review): brace lines and the m_Count/m_StaticArray member
// declarations were dropped by the extraction; tokens kept byte-identical.
5312 template<
typename T,
typename AllocatorT,
size_t N>
5313 class VmaSmallVector
5316 typedef T value_type;
// Constructs empty; the dynamic array only holds the allocator for now.
5318 VmaSmallVector(
const AllocatorT& allocator) :
5320 m_DynamicArray(allocator)
// Constructs with `count` elements; dynamic storage is used only when
// count exceeds the inline capacity N.
5323 VmaSmallVector(
size_t count,
const AllocatorT& allocator) :
5325 m_DynamicArray(count > N ? count : 0, allocator)
// Copying between VmaSmallVector instantiations is disallowed.
5328 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5329 VmaSmallVector(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) =
delete;
5330 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
5331 VmaSmallVector<T, AllocatorT, N>& operator=(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) =
delete;
5333 bool empty()
const {
return m_Count == 0; }
5334 size_t size()
const {
return m_Count; }
// data() selects the active backing store based on current count.
5335 T* data() {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5336 const T* data()
const {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
5338 T& operator[](
size_t index)
5340 VMA_HEAVY_ASSERT(index < m_Count);
5341 return data()[index];
5343 const T& operator[](
size_t index)
const
5345 VMA_HEAVY_ASSERT(index < m_Count);
5346 return data()[index];
// NOTE(review): front()/back() bodies are partially dropped below.
5351 VMA_HEAVY_ASSERT(m_Count > 0);
5354 const T& front()
const
5356 VMA_HEAVY_ASSERT(m_Count > 0);
5361 VMA_HEAVY_ASSERT(m_Count > 0);
5362 return data()[m_Count - 1];
5364 const T& back()
const
5366 VMA_HEAVY_ASSERT(m_Count > 0);
5367 return data()[m_Count - 1];
// resize() handles the four static/dynamic transition cases; elements
// crossing the boundary are moved with raw memcpy (T must be trivially
// copyable). freeMemory additionally shrinks heap capacity.
5370 void resize(
size_t newCount,
bool freeMemory =
false)
// Case 1: stays dynamic.
5372 if(newCount > N && m_Count > N)
5375 m_DynamicArray.resize(newCount);
5378 m_DynamicArray.shrink_to_fit();
// Case 2: grows from static into dynamic — copy inline elements out.
5381 else if(newCount > N && m_Count <= N)
5384 m_DynamicArray.resize(newCount);
5387 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count *
sizeof(T));
// Case 3: shrinks from dynamic back into static — copy elements in.
5390 else if(newCount <= N && m_Count > N)
5395 memcpy(m_StaticArray, m_DynamicArray.data(), newCount *
sizeof(T));
5397 m_DynamicArray.resize(0);
5400 m_DynamicArray.shrink_to_fit();
// clear(): drops all elements; optionally releases heap storage.
5410 void clear(
bool freeMemory =
false)
5412 m_DynamicArray.clear();
5415 m_DynamicArray.shrink_to_fit();
// insert(): same overlap-safe memmove scheme as VmaVector::insert, but
// data() must be re-fetched after resize() since the store may switch.
5420 void insert(
size_t index,
const T& src)
5422 VMA_HEAVY_ASSERT(index <= m_Count);
5423 const size_t oldCount = size();
5424 resize(oldCount + 1);
5425 T*
const dataPtr = data();
5426 if(index < oldCount)
5429 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) *
sizeof(T));
5431 dataPtr[index] = src;
5434 void remove(
size_t index)
5436 VMA_HEAVY_ASSERT(index < m_Count);
5437 const size_t oldCount = size();
5438 if(index < oldCount - 1)
5441 T*
const dataPtr = data();
5442 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) *
sizeof(T));
5444 resize(oldCount - 1);
5447 void push_back(
const T& src)
5449 const size_t newIndex = size();
5450 resize(newIndex + 1);
5451 data()[newIndex] = src;
// NOTE(review): assert below belongs to pop_back(); body dropped.
5456 VMA_HEAVY_ASSERT(m_Count > 0);
5460 void push_front(
const T& src)
// NOTE(review): assert below belongs to pop_front(); body dropped.
5467 VMA_HEAVY_ASSERT(m_Count > 0);
5471 typedef T* iterator;
5473 iterator begin() {
return data(); }
5474 iterator end() {
return data() + m_Count; }
// Heap-backed storage, active only while m_Count > N.
5479 VmaVector<T, AllocatorT> m_DynamicArray;
// Pool allocator for objects of type T: hands out items from growing
// blocks, keeping freed slots on a per-block intrusive free list
// (NextFreeIndex chains; UINT32_MAX terminates the list).
5490 template<
typename T>
5491 class VmaPoolAllocator
5493 VMA_CLASS_NO_COPY(VmaPoolAllocator)
5495 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
5496 ~VmaPoolAllocator();
// Constructs a T in-place with forwarded args; O(#blocks) search for a
// block with a free slot, allocating a new block if none has one.
5497 template<
typename... Types> T* Alloc(Types... args);
// Item: union-like slot — either the next-free index or the T payload.
5503 uint32_t NextFreeIndex;
// Raw aligned storage; T is constructed here via placement new.
5504 alignas(T)
char Value[
sizeof(T)];
// Head of this block's free-slot list (UINT32_MAX = block full).
5511 uint32_t FirstFreeIndex;
5514 const VkAllocationCallbacks* m_pAllocationCallbacks;
5515 const uint32_t m_FirstBlockCapacity;
5516 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
5518 ItemBlock& CreateNewBlock();
5521 template<
typename T>
5522 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
5523 m_pAllocationCallbacks(pAllocationCallbacks),
5524 m_FirstBlockCapacity(firstBlockCapacity),
5525 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
5527 VMA_ASSERT(m_FirstBlockCapacity > 1);
5530 template<
typename T>
5531 VmaPoolAllocator<T>::~VmaPoolAllocator()
5533 for(
size_t i = m_ItemBlocks.size(); i--; )
5534 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
5535 m_ItemBlocks.clear();
5538 template<
typename T>
5539 template<
typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
5541 for(
size_t i = m_ItemBlocks.size(); i--; )
5543 ItemBlock& block = m_ItemBlocks[i];
5545 if(block.FirstFreeIndex != UINT32_MAX)
5547 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
5548 block.FirstFreeIndex = pItem->NextFreeIndex;
5549 T* result = (T*)&pItem->Value;
5550 new(result)T(std::forward<Types>(args)...);
5556 ItemBlock& newBlock = CreateNewBlock();
5557 Item*
const pItem = &newBlock.pItems[0];
5558 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
5559 T* result = (T*)&pItem->Value;
5560 new(result)T(std::forward<Types>(args)...);
5564 template<
typename T>
5565 void VmaPoolAllocator<T>::Free(T* ptr)
5568 for(
size_t i = m_ItemBlocks.size(); i--; )
5570 ItemBlock& block = m_ItemBlocks[i];
5574 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
5577 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
5580 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
5581 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
5582 block.FirstFreeIndex = index;
5586 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
5589 template<
typename T>
5590 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
5592 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
5593 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
5595 const ItemBlock newBlock = {
5596 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
5600 m_ItemBlocks.push_back(newBlock);
5603 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
5604 newBlock.pItems[i].NextFreeIndex = i + 1;
5605 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
5606 return m_ItemBlocks.back();
5612 #if VMA_USE_STL_LIST
5614 #define VmaList std::list
5618 template<
typename T>
5627 template<
typename T>
// VmaRawList interior: non-owning-iterator doubly-linked list of
// VmaListItem<T> nodes, allocated from a VmaPoolAllocator.
// NOTE(review): the `class VmaRawList` header line and several member
// declarations were dropped by the extraction.
5630 VMA_CLASS_NO_COPY(VmaRawList)
5632 typedef VmaListItem<T> ItemType;
5634 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
5638 size_t GetCount()
const {
return m_Count; }
5639 bool IsEmpty()
const {
return m_Count == 0; }
// Direct access to the sentinel-free front/back node pointers.
5641 ItemType* Front() {
return m_pFront; }
5642 const ItemType* Front()
const {
return m_pFront; }
5643 ItemType* Back() {
return m_pBack; }
5644 const ItemType* Back()
const {
return m_pBack; }
// Push/insert overloads without a value return the raw node so the
// caller can fill in Value itself.
5646 ItemType* PushBack();
5647 ItemType* PushFront();
5648 ItemType* PushBack(
const T& value);
5649 ItemType* PushFront(
const T& value);
// Inserting before/after VMA_NULL degrades to PushBack/PushFront.
5654 ItemType* InsertBefore(ItemType* pItem);
5656 ItemType* InsertAfter(ItemType* pItem);
5658 ItemType* InsertBefore(ItemType* pItem,
const T& value);
5659 ItemType* InsertAfter(ItemType* pItem,
const T& value);
5661 void Remove(ItemType* pItem);
5664 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
// Node storage: pooled, so Remove/Push never hit the heap directly.
5665 VmaPoolAllocator<ItemType> m_ItemAllocator;
5671 template<
typename T>
5672 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
5673 m_pAllocationCallbacks(pAllocationCallbacks),
5674 m_ItemAllocator(pAllocationCallbacks, 128),
5681 template<
typename T>
5682 VmaRawList<T>::~VmaRawList()
5688 template<
typename T>
5689 void VmaRawList<T>::Clear()
5691 if(IsEmpty() ==
false)
5693 ItemType* pItem = m_pBack;
5694 while(pItem != VMA_NULL)
5696 ItemType*
const pPrevItem = pItem->pPrev;
5697 m_ItemAllocator.Free(pItem);
5700 m_pFront = VMA_NULL;
5706 template<
typename T>
5707 VmaListItem<T>* VmaRawList<T>::PushBack()
5709 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5710 pNewItem->pNext = VMA_NULL;
5713 pNewItem->pPrev = VMA_NULL;
5714 m_pFront = pNewItem;
5720 pNewItem->pPrev = m_pBack;
5721 m_pBack->pNext = pNewItem;
5728 template<
typename T>
5729 VmaListItem<T>* VmaRawList<T>::PushFront()
5731 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
5732 pNewItem->pPrev = VMA_NULL;
5735 pNewItem->pNext = VMA_NULL;
5736 m_pFront = pNewItem;
5742 pNewItem->pNext = m_pFront;
5743 m_pFront->pPrev = pNewItem;
5744 m_pFront = pNewItem;
5750 template<
typename T>
5751 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
5753 ItemType*
const pNewItem = PushBack();
5754 pNewItem->Value = value;
5758 template<
typename T>
5759 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
5761 ItemType*
const pNewItem = PushFront();
5762 pNewItem->Value = value;
5766 template<
typename T>
5767 void VmaRawList<T>::PopBack()
5769 VMA_HEAVY_ASSERT(m_Count > 0);
5770 ItemType*
const pBackItem = m_pBack;
5771 ItemType*
const pPrevItem = pBackItem->pPrev;
5772 if(pPrevItem != VMA_NULL)
5774 pPrevItem->pNext = VMA_NULL;
5776 m_pBack = pPrevItem;
5777 m_ItemAllocator.Free(pBackItem);
5781 template<
typename T>
5782 void VmaRawList<T>::PopFront()
5784 VMA_HEAVY_ASSERT(m_Count > 0);
5785 ItemType*
const pFrontItem = m_pFront;
5786 ItemType*
const pNextItem = pFrontItem->pNext;
5787 if(pNextItem != VMA_NULL)
5789 pNextItem->pPrev = VMA_NULL;
5791 m_pFront = pNextItem;
5792 m_ItemAllocator.Free(pFrontItem);
5796 template<
typename T>
5797 void VmaRawList<T>::Remove(ItemType* pItem)
5799 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
5800 VMA_HEAVY_ASSERT(m_Count > 0);
5802 if(pItem->pPrev != VMA_NULL)
5804 pItem->pPrev->pNext = pItem->pNext;
5808 VMA_HEAVY_ASSERT(m_pFront == pItem);
5809 m_pFront = pItem->pNext;
5812 if(pItem->pNext != VMA_NULL)
5814 pItem->pNext->pPrev = pItem->pPrev;
5818 VMA_HEAVY_ASSERT(m_pBack == pItem);
5819 m_pBack = pItem->pPrev;
5822 m_ItemAllocator.Free(pItem);
5826 template<
typename T>
5827 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
5829 if(pItem != VMA_NULL)
5831 ItemType*
const prevItem = pItem->pPrev;
5832 ItemType*
const newItem = m_ItemAllocator.Alloc();
5833 newItem->pPrev = prevItem;
5834 newItem->pNext = pItem;
5835 pItem->pPrev = newItem;
5836 if(prevItem != VMA_NULL)
5838 prevItem->pNext = newItem;
5842 VMA_HEAVY_ASSERT(m_pFront == pItem);
5852 template<
typename T>
5853 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
5855 if(pItem != VMA_NULL)
5857 ItemType*
const nextItem = pItem->pNext;
5858 ItemType*
const newItem = m_ItemAllocator.Alloc();
5859 newItem->pNext = nextItem;
5860 newItem->pPrev = pItem;
5861 pItem->pNext = newItem;
5862 if(nextItem != VMA_NULL)
5864 nextItem->pPrev = newItem;
5868 VMA_HEAVY_ASSERT(m_pBack == pItem);
5878 template<
typename T>
5879 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
5881 ItemType*
const newItem = InsertBefore(pItem);
5882 newItem->Value = value;
5886 template<
typename T>
5887 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
5889 ItemType*
const newItem = InsertAfter(pItem);
5890 newItem->Value = value;
// VmaList: STL-flavored facade over VmaRawList, adding bidirectional
// iterator / const_iterator wrappers around (list, node) pairs.
// end() is represented by a VMA_NULL node pointer; operator-- on end()
// therefore steps to Back().
// NOTE(review): the `class VmaList` header and iterator-class opener
// were dropped by the extraction; tokens kept byte-identical.
5894 template<
typename T,
typename AllocatorT>
5897 VMA_CLASS_NO_COPY(VmaList)
// --- iterator (mutable) ---
5908 T& operator*()
const
5910 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5911 return m_pItem->Value;
5913 T* operator->()
const
5915 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5916 return &m_pItem->Value;
// Pre-increment: advance to the next node; incrementing end() asserts.
5919 iterator& operator++()
5921 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5922 m_pItem = m_pItem->pNext;
// Pre-decrement: stepping back from end() (null node) yields Back().
5925 iterator& operator--()
5927 if(m_pItem != VMA_NULL)
5929 m_pItem = m_pItem->pPrev;
5933 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5934 m_pItem = m_pList->Back();
// Post-increment/decrement: copy, mutate, return the copy.
5939 iterator operator++(
int)
5941 iterator result = *
this;
5945 iterator operator--(
int)
5947 iterator result = *
this;
// Comparisons require both iterators to belong to the same list.
5952 bool operator==(
const iterator& rhs)
const
5954 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5955 return m_pItem == rhs.m_pItem;
5957 bool operator!=(
const iterator& rhs)
const
5959 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5960 return m_pItem != rhs.m_pItem;
5964 VmaRawList<T>* m_pList;
5965 VmaListItem<T>* m_pItem;
// Private ctor: only VmaList itself may mint iterators.
5967 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
5973 friend class VmaList<T, AllocatorT>;
// --- const_iterator: same mechanics over const pointers ---
5976 class const_iterator
// Implicit conversion from mutable iterator.
5985 const_iterator(
const iterator& src) :
5986 m_pList(src.m_pList),
5987 m_pItem(src.m_pItem)
5991 const T& operator*()
const
5993 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5994 return m_pItem->Value;
5996 const T* operator->()
const
5998 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5999 return &m_pItem->Value;
6002 const_iterator& operator++()
6004 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
6005 m_pItem = m_pItem->pNext;
6008 const_iterator& operator--()
6010 if(m_pItem != VMA_NULL)
6012 m_pItem = m_pItem->pPrev;
6016 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
6017 m_pItem = m_pList->Back();
6022 const_iterator operator++(
int)
6024 const_iterator result = *
this;
6028 const_iterator operator--(
int)
6030 const_iterator result = *
this;
6035 bool operator==(
const const_iterator& rhs)
const
6037 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
6038 return m_pItem == rhs.m_pItem;
6040 bool operator!=(
const const_iterator& rhs)
const
6042 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
6043 return m_pItem != rhs.m_pItem;
6047 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
6053 const VmaRawList<T>* m_pList;
6054 const VmaListItem<T>* m_pItem;
6056 friend class VmaList<T, AllocatorT>;
// --- VmaList public interface: thin forwarding to m_RawList ---
6059 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
6061 bool empty()
const {
return m_RawList.IsEmpty(); }
6062 size_t size()
const {
return m_RawList.GetCount(); }
6064 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
// end() iterator carries a null node pointer by convention.
6065 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
6067 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
6068 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
6070 const_iterator begin()
const {
return cbegin(); }
6071 const_iterator end()
const {
return cend(); }
6073 void clear() { m_RawList.Clear(); }
6074 void push_back(
const T& value) { m_RawList.PushBack(value); }
6075 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
6076 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
6079 VmaRawList<T> m_RawList;
// Intrusive doubly-linked list: nodes are the user's own objects, and
// prev/next links are reached through the ItemTypeTraits accessor
// policy (GetPrev/GetNext for reads, AccessPrev/AccessNext for writes).
// The list never allocates; it only rewires links.
6098 template<
typename ItemTypeTraits>
6099 class VmaIntrusiveLinkedList
6102 typedef typename ItemTypeTraits::ItemType ItemType;
6103 static ItemType* GetPrev(
const ItemType* item) {
return ItemTypeTraits::GetPrev(item); }
6104 static ItemType* GetNext(
const ItemType* item) {
return ItemTypeTraits::GetNext(item); }
6106 VmaIntrusiveLinkedList() { }
// Copy is deleted; move transfers ownership and empties the source.
6107 VmaIntrusiveLinkedList(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
6108 VmaIntrusiveLinkedList(VmaIntrusiveLinkedList<ItemTypeTraits>&& src) :
6109 m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count)
6111 src.m_Front = src.m_Back = VMA_NULL;
// Destruction requires the caller to have removed all items first.
6114 ~VmaIntrusiveLinkedList()
6116 VMA_HEAVY_ASSERT(IsEmpty());
6118 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
// Move-assignment: destination must be empty (asserted).
6119 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(VmaIntrusiveLinkedList<ItemTypeTraits>&& src)
6123 VMA_HEAVY_ASSERT(IsEmpty());
6124 m_Front = src.m_Front;
6125 m_Back = src.m_Back;
6126 m_Count = src.m_Count;
6127 src.m_Front = src.m_Back = VMA_NULL;
// RemoveAll (header dropped): walks back-to-front nulling both links
// of every item so each leaves the list in a detached state.
6136 ItemType* item = m_Back;
6137 while(item != VMA_NULL)
6139 ItemType*
const prevItem = ItemTypeTraits::AccessPrev(item);
6140 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
6141 ItemTypeTraits::AccessNext(item) = VMA_NULL;
6149 size_t GetCount()
const {
return m_Count; }
6150 bool IsEmpty()
const {
return m_Count == 0; }
6151 ItemType* Front() {
return m_Front; }
6152 const ItemType* Front()
const {
return m_Front; }
6153 ItemType* Back() {
return m_Back; }
6154 const ItemType* Back()
const {
return m_Back; }
// PushBack: item must be detached (both links null, asserted).
6155 void PushBack(ItemType* item)
6157 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
6166 ItemTypeTraits::AccessPrev(item) = m_Back;
6167 ItemTypeTraits::AccessNext(m_Back) = item;
6172 void PushFront(ItemType* item)
6174 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
6183 ItemTypeTraits::AccessNext(item) = m_Front;
6184 ItemTypeTraits::AccessPrev(m_Front) = item;
// PopBack (header dropped): detaches and returns the back item.
6191 VMA_HEAVY_ASSERT(m_Count > 0);
6192 ItemType*
const backItem = m_Back;
6193 ItemType*
const prevItem = ItemTypeTraits::GetPrev(backItem);
6194 if(prevItem != VMA_NULL)
6196 ItemTypeTraits::AccessNext(prevItem) = VMA_NULL;
// Popped item leaves with both of its links cleared.
6200 ItemTypeTraits::AccessPrev(backItem) = VMA_NULL;
6201 ItemTypeTraits::AccessNext(backItem) = VMA_NULL;
6204 ItemType* PopFront()
6206 VMA_HEAVY_ASSERT(m_Count > 0);
6207 ItemType*
const frontItem = m_Front;
6208 ItemType*
const nextItem = ItemTypeTraits::GetNext(frontItem);
6209 if(nextItem != VMA_NULL)
6211 ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL;
6215 ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL;
6216 ItemTypeTraits::AccessNext(frontItem) = VMA_NULL;
// InsertBefore: existingItem == VMA_NULL degrades to PushBack.
6221 void InsertBefore(ItemType* existingItem, ItemType* newItem)
6223 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
6224 if(existingItem != VMA_NULL)
6226 ItemType*
const prevItem = ItemTypeTraits::GetPrev(existingItem);
6227 ItemTypeTraits::AccessPrev(newItem) = prevItem;
6228 ItemTypeTraits::AccessNext(newItem) = existingItem;
6229 ItemTypeTraits::AccessPrev(existingItem) = newItem;
6230 if(prevItem != VMA_NULL)
6232 ItemTypeTraits::AccessNext(prevItem) = newItem;
6236 VMA_HEAVY_ASSERT(m_Front == existingItem);
// InsertAfter: existingItem == VMA_NULL degrades to PushFront.
6245 void InsertAfter(ItemType* existingItem, ItemType* newItem)
6247 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
6248 if(existingItem != VMA_NULL)
6250 ItemType*
const nextItem = ItemTypeTraits::GetNext(existingItem);
6251 ItemTypeTraits::AccessNext(newItem) = nextItem;
6252 ItemTypeTraits::AccessPrev(newItem) = existingItem;
6253 ItemTypeTraits::AccessNext(existingItem) = newItem;
6254 if(nextItem != VMA_NULL)
6256 ItemTypeTraits::AccessPrev(nextItem) = newItem;
6260 VMA_HEAVY_ASSERT(m_Back == existingItem);
6266 return PushFront(newItem);
// Remove: unlinks an arbitrary item and clears its links.
6268 void Remove(ItemType* item)
6270 VMA_HEAVY_ASSERT(item != VMA_NULL && m_Count > 0);
6271 if(ItemTypeTraits::GetPrev(item) != VMA_NULL)
6273 ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item);
6277 VMA_HEAVY_ASSERT(m_Front == item);
6278 m_Front = ItemTypeTraits::GetNext(item);
6281 if(ItemTypeTraits::GetNext(item) != VMA_NULL)
6283 ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item);
6287 VMA_HEAVY_ASSERT(m_Back == item);
6288 m_Back = ItemTypeTraits::GetPrev(item);
6290 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
6291 ItemTypeTraits::AccessNext(item) = VMA_NULL;
6295 ItemType* m_Front = VMA_NULL;
6296 ItemType* m_Back = VMA_NULL;
6306 #if VMA_USE_STL_UNORDERED_MAP
6308 #define VmaPair std::pair
6310 #define VMA_MAP_TYPE(KeyT, ValueT) \
6311 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
// Minimal std::pair replacement used when VMA_USE_STL_UNORDERED_MAP is
// off. NOTE(review): the `struct VmaPair` header and member
// declarations (first/second) were dropped by the extraction.
6315 template<
typename T1,
typename T2>
// Value-initializes both members.
6321 VmaPair() : first(), second() { }
6322 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
// Flat map: key-sorted VmaVector of VmaPair, searched by binary search.
// Iterators are raw pointers into the vector and are invalidated by
// insert/erase. NOTE(review): the `class VmaMap` header line was
// dropped by the extraction.
6328 template<
typename KeyT,
typename ValueT>
6332 typedef VmaPair<KeyT, ValueT> PairType;
6333 typedef PairType* iterator;
6335 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
6337 iterator begin() {
return m_Vector.begin(); }
6338 iterator end() {
return m_Vector.end(); }
// insert keeps m_Vector sorted by key; find is O(log n); erase shifts.
6340 void insert(
const PairType& pair);
6341 iterator find(
const KeyT& key);
6342 void erase(iterator it);
6345 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
6348 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
6350 template<
typename FirstT,
typename SecondT>
6351 struct VmaPairFirstLess
6353 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const
6355 return lhs.first < rhs.first;
6357 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const
6359 return lhs.first < rhsFirst;
6363 template<
typename KeyT,
typename ValueT>
6364 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
6366 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
6368 m_Vector.data() + m_Vector.size(),
6370 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
6371 VmaVectorInsert(m_Vector, indexToInsert, pair);
6374 template<
typename KeyT,
typename ValueT>
6375 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
6377 PairType* it = VmaBinaryFindFirstNotLess(
6379 m_Vector.data() + m_Vector.size(),
6381 VmaPairFirstLess<KeyT, ValueT>());
6382 if((it != m_Vector.end()) && (it->first == key))
6388 return m_Vector.end();
6392 template<
typename KeyT,
typename ValueT>
6393 void VmaMap<KeyT, ValueT>::erase(iterator it)
6395 VmaVectorRemove(m_Vector, it - m_Vector.begin());
6404 class VmaDeviceMemoryBlock;
6406 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
// VmaAllocation_T: internal representation of a single allocation.
// Lives either inside a VmaDeviceMemoryBlock (ALLOCATION_TYPE_BLOCK) or
// as its own VkDeviceMemory (ALLOCATION_TYPE_DEDICATED); the two cases
// share storage via the BlockAllocation/DedicatedAllocation structs.
// NOTE(review): brace lines, some members (m_Size, m_MapCount, m_Flags)
// and several method bodies were dropped by the extraction.
6408 struct VmaAllocation_T
// High bit of m_MapCount marks a persistently-mapped allocation.
6411 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
6415 FLAG_USER_DATA_STRING = 0x01,
6419 enum ALLOCATION_TYPE
6421 ALLOCATION_TYPE_NONE,
6422 ALLOCATION_TYPE_BLOCK,
6423 ALLOCATION_TYPE_DEDICATED,
// Constructed in NONE state; a later Init*Allocation call decides the
// concrete type. userDataString selects string- vs pointer-typed pUserData.
6430 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
6433 m_pUserData{VMA_NULL},
6434 m_LastUseFrameIndex{currentFrameIndex},
6435 m_MemoryTypeIndex{0},
6436 m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
6437 m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
6439 m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
6441 #if VMA_STATS_STRING_ENABLED
6442 m_CreationFrameIndex = currentFrameIndex;
6443 m_BufferImageUsage = 0;
// Destructor checks (lines partially dropped): the allocation must be
// fully unmapped and its user data already freed.
6449 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
6452 VMA_ASSERT(m_pUserData == VMA_NULL);
// Transitions NONE -> BLOCK for a suballocation within `block`.
6455 void InitBlockAllocation(
6456 VmaDeviceMemoryBlock* block,
6457 VkDeviceSize offset,
6458 VkDeviceSize alignment,
6460 uint32_t memoryTypeIndex,
6461 VmaSuballocationType suballocationType,
6465 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6466 VMA_ASSERT(block != VMA_NULL);
6467 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6468 m_Alignment = alignment;
6470 m_MemoryTypeIndex = memoryTypeIndex;
6471 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6472 m_SuballocationType = (uint8_t)suballocationType;
6473 m_BlockAllocation.m_Block = block;
6474 m_BlockAllocation.m_Offset = offset;
6475 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// InitLost (header dropped): creates a placeholder "lost" allocation
// with a null block; only valid when the frame index reads LOST.
6480 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6481 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
6482 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
6483 m_MemoryTypeIndex = 0;
6484 m_BlockAllocation.m_Block = VMA_NULL;
6485 m_BlockAllocation.m_Offset = 0;
6486 m_BlockAllocation.m_CanBecomeLost =
true;
// Used during defragmentation to retarget a block allocation.
6489 void ChangeBlockAllocation(
6491 VmaDeviceMemoryBlock* block,
6492 VkDeviceSize offset);
6494 void ChangeOffset(VkDeviceSize newOffset);
// Transitions NONE -> DEDICATED for an allocation owning its own
// VkDeviceMemory; pMappedData non-null implies persistently mapped.
6497 void InitDedicatedAllocation(
6498 uint32_t memoryTypeIndex,
6499 VkDeviceMemory hMemory,
6500 VmaSuballocationType suballocationType,
6504 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
6505 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
6506 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
6509 m_MemoryTypeIndex = memoryTypeIndex;
6510 m_SuballocationType = (uint8_t)suballocationType;
6511 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
6512 m_DedicatedAllocation.m_hMemory = hMemory;
6513 m_DedicatedAllocation.m_pMappedData = pMappedData;
// Dedicated allocations are chained into an intrusive list (see
// VmaDedicatedAllocationListItemTraits below).
6514 m_DedicatedAllocation.m_Prev = VMA_NULL;
6515 m_DedicatedAllocation.m_Next = VMA_NULL;
// --- simple accessors ---
6518 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
6519 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
6520 VkDeviceSize GetSize()
const {
return m_Size; }
6521 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
6522 void* GetUserData()
const {
return m_pUserData; }
6523 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
6524 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for BLOCK-type allocations (asserted).
6526 VmaDeviceMemoryBlock* GetBlock()
const
6528 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6529 return m_BlockAllocation.m_Block;
6531 VkDeviceSize GetOffset()
const;
6532 VkDeviceMemory GetMemory()
const;
6533 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
6534 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
6535 void* GetMappedData()
const;
6536 bool CanBecomeLost()
const;
// Lost-allocation machinery: frame index is atomic because it is
// updated from multiple threads via compare-exchange.
6538 uint32_t GetLastUseFrameIndex()
const
6540 return m_LastUseFrameIndex.load();
6542 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
6544 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
6554 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6556 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
6558 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map/unmap helpers, split by allocation type.
6569 void BlockAllocMap();
6570 void BlockAllocUnmap();
6571 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
6574 #if VMA_STATS_STRING_ENABLED
6575 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
6576 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
6578 void InitBufferImageUsage(uint32_t bufferImageUsage)
6580 VMA_ASSERT(m_BufferImageUsage == 0);
6581 m_BufferImageUsage = bufferImageUsage;
6584 void PrintParameters(
class VmaJsonWriter& json)
const;
// --- data members ---
6588 VkDeviceSize m_Alignment;
6589 VkDeviceSize m_Size;
6591 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
6592 uint32_t m_MemoryTypeIndex;
6594 uint8_t m_SuballocationType;
// Per-type payloads; exactly one is active depending on m_Type.
6601 struct BlockAllocation
6603 VmaDeviceMemoryBlock* m_Block;
6604 VkDeviceSize m_Offset;
6605 bool m_CanBecomeLost;
6609 struct DedicatedAllocation
6611 VkDeviceMemory m_hMemory;
6612 void* m_pMappedData;
// Intrusive links for the allocator's dedicated-allocation list.
6613 VmaAllocation_T* m_Prev;
6614 VmaAllocation_T* m_Next;
6620 BlockAllocation m_BlockAllocation;
6622 DedicatedAllocation m_DedicatedAllocation;
6625 #if VMA_STATS_STRING_ENABLED
6626 uint32_t m_CreationFrameIndex;
6627 uint32_t m_BufferImageUsage;
6632 friend struct VmaDedicatedAllocationListItemTraits;
// Traits adapter letting VmaIntrusiveLinkedList chain VmaAllocation_T
// objects through m_DedicatedAllocation.m_Prev/m_Next. Every accessor
// asserts the allocation is actually DEDICATED, since those links only
// exist in that union arm.
6635 struct VmaDedicatedAllocationListItemTraits
6637 typedef VmaAllocation_T ItemType;
6638 static ItemType* GetPrev(
const ItemType* item)
6640 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6641 return item->m_DedicatedAllocation.m_Prev;
6643 static ItemType* GetNext(
const ItemType* item)
6645 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6646 return item->m_DedicatedAllocation.m_Next;
// Access* return mutable references so the list can rewire links.
6648 static ItemType*& AccessPrev(ItemType* item)
6650 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6651 return item->m_DedicatedAllocation.m_Prev;
6653 static ItemType*& AccessNext(ItemType* item){
6654 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
6655 return item->m_DedicatedAllocation.m_Next;
// VmaSuballocation: one region (free or used) inside a memory block.
// NOTE(review): size and hAllocation members were dropped by the
// extraction.
6663 struct VmaSuballocation
6665 VkDeviceSize offset;
6668 VmaSuballocationType type;
// Comparators over suballocation offset for sorted containers
// (ascending for 1st vector, descending for the ring-buffer 2nd vector).
6672 struct VmaSuballocationOffsetLess
6674 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6676 return lhs.offset < rhs.offset;
6679 struct VmaSuballocationOffsetGreater
6681 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
6683 return lhs.offset > rhs.offset;
6687 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost in bytes charged per allocation that would have to be made lost.
6690 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// NOTE(review): enumerators of this enum were dropped by the extraction.
6692 enum class VmaAllocationRequestType
// Parameters/result of CreateAllocationRequest: where the allocation
// would go and what it would cost in lost allocations.
6714 struct VmaAllocationRequest
6716 VkDeviceSize offset;
6717 VkDeviceSize sumFreeSize;
6718 VkDeviceSize sumItemSize;
6719 VmaSuballocationList::iterator item;
6720 size_t itemsToMakeLostCount;
6722 VmaAllocationRequestType type;
// Heuristic cost: bytes of used suballocations that must be evicted,
// plus a fixed penalty per allocation made lost.
6724 VkDeviceSize CalcCost()
const
6726 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Abstract interface for the bookkeeping of a single memory block:
// concrete strategies (Generic, Linear, Buddy) implement allocation
// search, lost-allocation handling, statistics, and JSON dumping.
6734 class VmaBlockMetadata
6738 virtual ~VmaBlockMetadata() { }
6739 virtual void Init(VkDeviceSize size) { m_Size = size; }
// Validation is for debugging; must touch no state.
6742 virtual bool Validate()
const = 0;
6743 VkDeviceSize GetSize()
const {
return m_Size; }
6744 virtual size_t GetAllocationCount()
const = 0;
6745 virtual VkDeviceSize GetSumFreeSize()
const = 0;
6746 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
// true when the whole block is one free range.
6748 virtual bool IsEmpty()
const = 0;
6750 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
6752 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
6754 #if VMA_STATS_STRING_ENABLED
6755 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Tries to find a place for an allocation of given size/alignment;
// fills *pAllocationRequest and returns true on success. May plan to
// make other allocations lost when canMakeOtherLost is set.
6761 virtual bool CreateAllocationRequest(
6762 uint32_t currentFrameIndex,
6763 uint32_t frameInUseCount,
6764 VkDeviceSize bufferImageGranularity,
6765 VkDeviceSize allocSize,
6766 VkDeviceSize allocAlignment,
6768 VmaSuballocationType allocType,
6769 bool canMakeOtherLost,
6772 VmaAllocationRequest* pAllocationRequest) = 0;
6774 virtual bool MakeRequestedAllocationsLost(
6775 uint32_t currentFrameIndex,
6776 uint32_t frameInUseCount,
6777 VmaAllocationRequest* pAllocationRequest) = 0;
6779 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
6781 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
// Alloc (header dropped): commits a previously created request.
6785 const VmaAllocationRequest& request,
6786 VmaSuballocationType type,
6787 VkDeviceSize allocSize,
6792 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
6795 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
// Shared helpers for the JSON statistics dump.
6797 #if VMA_STATS_STRING_ENABLED
6798 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6799 VkDeviceSize unusedBytes,
6800 size_t allocationCount,
6801 size_t unusedRangeCount)
const;
6802 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6803 VkDeviceSize offset,
6805 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6806 VkDeviceSize offset,
6807 VkDeviceSize size)
const;
6808 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
6812 VkDeviceSize m_Size;
6813 const VkAllocationCallbacks* m_pAllocationCallbacks;
6816 #define VMA_VALIDATE(cond) do { if(!(cond)) { \
6817 VMA_ASSERT(0 && "Validation failed: " #cond); \
// Default metadata strategy: a list of suballocations (free and used,
// sorted by offset) plus a by-size index of free ranges for best-fit
// searches.
6821 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
6823 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
6826 virtual ~VmaBlockMetadata_Generic();
6827 virtual void Init(VkDeviceSize size);
6829 virtual bool Validate()
const;
// Used entries = all suballocations minus the free ones.
6830 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
6831 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
6832 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6833 virtual bool IsEmpty()
const;
6835 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6836 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6838 #if VMA_STATS_STRING_ENABLED
6839 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Overrides of the allocation-search interface (see base class).
6842 virtual bool CreateAllocationRequest(
6843 uint32_t currentFrameIndex,
6844 uint32_t frameInUseCount,
6845 VkDeviceSize bufferImageGranularity,
6846 VkDeviceSize allocSize,
6847 VkDeviceSize allocAlignment,
6849 VmaSuballocationType allocType,
6850 bool canMakeOtherLost,
6852 VmaAllocationRequest* pAllocationRequest);
6854 virtual bool MakeRequestedAllocationsLost(
6855 uint32_t currentFrameIndex,
6856 uint32_t frameInUseCount,
6857 VmaAllocationRequest* pAllocationRequest);
6859 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6861 virtual VkResult CheckCorruption(
const void* pBlockData);
// Alloc override (header dropped by the extraction).
6864 const VmaAllocationRequest& request,
6865 VmaSuballocationType type,
6866 VkDeviceSize allocSize,
6870 virtual void FreeAtOffset(VkDeviceSize offset);
// Heuristic used by defragmentation to decide whether granularity
// conflicts may occur in this block.
6875 bool IsBufferImageGranularityConflictPossible(
6876 VkDeviceSize bufferImageGranularity,
6877 VmaSuballocationType& inOutPrevSuballocType)
const;
6880 friend class VmaDefragmentationAlgorithm_Generic;
6881 friend class VmaDefragmentationAlgorithm_Fast;
6883 uint32_t m_FreeCount;
6884 VkDeviceSize m_SumFreeSize;
// All suballocations, sorted by offset, covering the whole block.
6885 VmaSuballocationList m_Suballocations;
// Free suballocations above a minimum size, sorted by size, to make
// best-fit lookup logarithmic.
6888 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
6890 bool ValidateFreeSuballocationList()
const;
// Core fit test: can `allocSize` go at/after `suballocItem`, possibly
// making later allocations lost? Outputs offset and cost components.
6894 bool CheckAllocation(
6895 uint32_t currentFrameIndex,
6896 uint32_t frameInUseCount,
6897 VkDeviceSize bufferImageGranularity,
6898 VkDeviceSize allocSize,
6899 VkDeviceSize allocAlignment,
6900 VmaSuballocationType allocType,
6901 VmaSuballocationList::const_iterator suballocItem,
6902 bool canMakeOtherLost,
6903 VkDeviceSize* pOffset,
6904 size_t* itemsToMakeLostCount,
6905 VkDeviceSize* pSumFreeSize,
6906 VkDeviceSize* pSumItemSize)
const;
// Coalesces `item` with the following free suballocation.
6908 void MergeFreeWithNext(VmaSuballocationList::iterator item);
6912 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
// Keep m_FreeSuballocationsBySize in sync with list mutations.
6915 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
6918 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
6999 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
7001 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
7004 virtual ~VmaBlockMetadata_Linear();
7005 virtual void Init(VkDeviceSize size);
7007 virtual bool Validate()
const;
7008 virtual size_t GetAllocationCount()
const;
7009 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
7010 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
7011 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
7013 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
7014 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
7016 #if VMA_STATS_STRING_ENABLED
7017 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
7020 virtual bool CreateAllocationRequest(
7021 uint32_t currentFrameIndex,
7022 uint32_t frameInUseCount,
7023 VkDeviceSize bufferImageGranularity,
7024 VkDeviceSize allocSize,
7025 VkDeviceSize allocAlignment,
7027 VmaSuballocationType allocType,
7028 bool canMakeOtherLost,
7030 VmaAllocationRequest* pAllocationRequest);
7032 virtual bool MakeRequestedAllocationsLost(
7033 uint32_t currentFrameIndex,
7034 uint32_t frameInUseCount,
7035 VmaAllocationRequest* pAllocationRequest);
7037 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
7039 virtual VkResult CheckCorruption(
const void* pBlockData);
7042 const VmaAllocationRequest& request,
7043 VmaSuballocationType type,
7044 VkDeviceSize allocSize,
7048 virtual void FreeAtOffset(VkDeviceSize offset);
7058 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
7060 enum SECOND_VECTOR_MODE
7062 SECOND_VECTOR_EMPTY,
7067 SECOND_VECTOR_RING_BUFFER,
7073 SECOND_VECTOR_DOUBLE_STACK,
7076 VkDeviceSize m_SumFreeSize;
7077 SuballocationVectorType m_Suballocations0, m_Suballocations1;
7078 uint32_t m_1stVectorIndex;
7079 SECOND_VECTOR_MODE m_2ndVectorMode;
7081 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7082 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
7083 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
7084 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
7087 size_t m_1stNullItemsBeginCount;
7089 size_t m_1stNullItemsMiddleCount;
7091 size_t m_2ndNullItemsCount;
7093 bool ShouldCompact1st()
const;
7094 void CleanupAfterFree();
7096 bool CreateAllocationRequest_LowerAddress(
7097 uint32_t currentFrameIndex,
7098 uint32_t frameInUseCount,
7099 VkDeviceSize bufferImageGranularity,
7100 VkDeviceSize allocSize,
7101 VkDeviceSize allocAlignment,
7102 VmaSuballocationType allocType,
7103 bool canMakeOtherLost,
7105 VmaAllocationRequest* pAllocationRequest);
7106 bool CreateAllocationRequest_UpperAddress(
7107 uint32_t currentFrameIndex,
7108 uint32_t frameInUseCount,
7109 VkDeviceSize bufferImageGranularity,
7110 VkDeviceSize allocSize,
7111 VkDeviceSize allocAlignment,
7112 VmaSuballocationType allocType,
7113 bool canMakeOtherLost,
7115 VmaAllocationRequest* pAllocationRequest);
7129 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
7131 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
7134 virtual ~VmaBlockMetadata_Buddy();
7135 virtual void Init(VkDeviceSize size);
7137 virtual bool Validate()
const;
7138 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
7139 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
7140 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
7141 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
7143 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
7144 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
7146 #if VMA_STATS_STRING_ENABLED
7147 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
7150 virtual bool CreateAllocationRequest(
7151 uint32_t currentFrameIndex,
7152 uint32_t frameInUseCount,
7153 VkDeviceSize bufferImageGranularity,
7154 VkDeviceSize allocSize,
7155 VkDeviceSize allocAlignment,
7157 VmaSuballocationType allocType,
7158 bool canMakeOtherLost,
7160 VmaAllocationRequest* pAllocationRequest);
7162 virtual bool MakeRequestedAllocationsLost(
7163 uint32_t currentFrameIndex,
7164 uint32_t frameInUseCount,
7165 VmaAllocationRequest* pAllocationRequest);
7167 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
7169 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
7172 const VmaAllocationRequest& request,
7173 VmaSuballocationType type,
7174 VkDeviceSize allocSize,
7177 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
7178 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
7181 static const VkDeviceSize MIN_NODE_SIZE = 32;
7182 static const size_t MAX_LEVELS = 30;
7184 struct ValidationContext
7186 size_t calculatedAllocationCount;
7187 size_t calculatedFreeCount;
7188 VkDeviceSize calculatedSumFreeSize;
7190 ValidationContext() :
7191 calculatedAllocationCount(0),
7192 calculatedFreeCount(0),
7193 calculatedSumFreeSize(0) { }
7198 VkDeviceSize offset;
7228 VkDeviceSize m_UsableSize;
7229 uint32_t m_LevelCount;
7235 } m_FreeList[MAX_LEVELS];
7237 size_t m_AllocationCount;
7241 VkDeviceSize m_SumFreeSize;
7243 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
7244 void DeleteNode(Node* node);
7245 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
7246 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
7247 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
7249 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
7250 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
7254 void AddToFreeListFront(uint32_t level, Node* node);
7258 void RemoveFromFreeList(uint32_t level, Node* node);
7260 #if VMA_STATS_STRING_ENABLED
7261 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
7271 class VmaDeviceMemoryBlock
7273 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
7275 VmaBlockMetadata* m_pMetadata;
7279 ~VmaDeviceMemoryBlock()
7281 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
7282 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
7289 uint32_t newMemoryTypeIndex,
7290 VkDeviceMemory newMemory,
7291 VkDeviceSize newSize,
7293 uint32_t algorithm);
7297 VmaPool GetParentPool()
const {
return m_hParentPool; }
7298 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
7299 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7300 uint32_t GetId()
const {
return m_Id; }
7301 void* GetMappedData()
const {
return m_pMappedData; }
7304 bool Validate()
const;
7309 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
7312 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
7313 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
7315 VkResult BindBufferMemory(
7318 VkDeviceSize allocationLocalOffset,
7321 VkResult BindImageMemory(
7324 VkDeviceSize allocationLocalOffset,
7330 uint32_t m_MemoryTypeIndex;
7332 VkDeviceMemory m_hMemory;
7340 uint32_t m_MapCount;
7341 void* m_pMappedData;
7344 struct VmaDefragmentationMove
7346 size_t srcBlockIndex;
7347 size_t dstBlockIndex;
7348 VkDeviceSize srcOffset;
7349 VkDeviceSize dstOffset;
7352 VmaDeviceMemoryBlock* pSrcBlock;
7353 VmaDeviceMemoryBlock* pDstBlock;
7356 class VmaDefragmentationAlgorithm;
7364 struct VmaBlockVector
7366 VMA_CLASS_NO_COPY(VmaBlockVector)
7371 uint32_t memoryTypeIndex,
7372 VkDeviceSize preferredBlockSize,
7373 size_t minBlockCount,
7374 size_t maxBlockCount,
7375 VkDeviceSize bufferImageGranularity,
7376 uint32_t frameInUseCount,
7377 bool explicitBlockSize,
7380 VkDeviceSize minAllocationAlignment,
7381 void* pMemoryAllocateNext);
7384 VkResult CreateMinBlocks();
7386 VmaAllocator GetAllocator()
const {
return m_hAllocator; }
7387 VmaPool GetParentPool()
const {
return m_hParentPool; }
7388 bool IsCustomPool()
const {
return m_hParentPool != VMA_NULL; }
7389 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
7390 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
7391 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
7392 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
7393 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
7398 bool IsCorruptionDetectionEnabled()
const;
7401 uint32_t currentFrameIndex,
7403 VkDeviceSize alignment,
7405 VmaSuballocationType suballocType,
7406 size_t allocationCount,
7414 #if VMA_STATS_STRING_ENABLED
7415 void PrintDetailedMap(
class VmaJsonWriter& json);
7418 void MakePoolAllocationsLost(
7419 uint32_t currentFrameIndex,
7420 size_t* pLostAllocationCount);
7421 VkResult CheckCorruption();
7425 class VmaBlockVectorDefragmentationContext* pCtx,
7427 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
7428 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
7429 VkCommandBuffer commandBuffer);
7430 void DefragmentationEnd(
7431 class VmaBlockVectorDefragmentationContext* pCtx,
7435 uint32_t ProcessDefragmentations(
7436 class VmaBlockVectorDefragmentationContext *pCtx,
7439 void CommitDefragmentations(
7440 class VmaBlockVectorDefragmentationContext *pCtx,
7446 size_t GetBlockCount()
const {
return m_Blocks.size(); }
7447 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
7448 size_t CalcAllocationCount()
const;
7449 bool IsBufferImageGranularityConflictPossible()
const;
7452 friend class VmaDefragmentationAlgorithm_Generic;
7456 const uint32_t m_MemoryTypeIndex;
7457 const VkDeviceSize m_PreferredBlockSize;
7458 const size_t m_MinBlockCount;
7459 const size_t m_MaxBlockCount;
7460 const VkDeviceSize m_BufferImageGranularity;
7461 const uint32_t m_FrameInUseCount;
7462 const bool m_ExplicitBlockSize;
7463 const uint32_t m_Algorithm;
7464 const float m_Priority;
7465 const VkDeviceSize m_MinAllocationAlignment;
7466 void*
const m_pMemoryAllocateNext;
7467 VMA_RW_MUTEX m_Mutex;
7471 bool m_HasEmptyBlock;
7473 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
7474 uint32_t m_NextBlockId;
7476 VkDeviceSize CalcMaxBlockSize()
const;
7479 void Remove(VmaDeviceMemoryBlock* pBlock);
7483 void IncrementallySortBlocks();
7485 VkResult AllocatePage(
7486 uint32_t currentFrameIndex,
7488 VkDeviceSize alignment,
7490 VmaSuballocationType suballocType,
7494 VkResult AllocateFromBlock(
7495 VmaDeviceMemoryBlock* pBlock,
7496 uint32_t currentFrameIndex,
7498 VkDeviceSize alignment,
7501 VmaSuballocationType suballocType,
7505 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
7508 void ApplyDefragmentationMovesCpu(
7509 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7510 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
7512 void ApplyDefragmentationMovesGpu(
7513 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7514 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7515 VkCommandBuffer commandBuffer);
7523 void UpdateHasEmptyBlock();
7528 VMA_CLASS_NO_COPY(VmaPool_T)
7530 VmaBlockVector m_BlockVector;
7535 VkDeviceSize preferredBlockSize);
7538 uint32_t GetId()
const {
return m_Id; }
7539 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
7541 const char* GetName()
const {
return m_Name; }
7542 void SetName(
const char* pName);
7544 #if VMA_STATS_STRING_ENABLED
7551 VmaPool_T* m_PrevPool = VMA_NULL;
7552 VmaPool_T* m_NextPool = VMA_NULL;
7553 friend struct VmaPoolListItemTraits;
7556 struct VmaPoolListItemTraits
7558 typedef VmaPool_T ItemType;
7559 static ItemType* GetPrev(
const ItemType* item) {
return item->m_PrevPool; }
7560 static ItemType* GetNext(
const ItemType* item) {
return item->m_NextPool; }
7561 static ItemType*& AccessPrev(ItemType* item) {
return item->m_PrevPool; }
7562 static ItemType*& AccessNext(ItemType* item) {
return item->m_NextPool; }
7572 class VmaDefragmentationAlgorithm
7574 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
7576 VmaDefragmentationAlgorithm(
7578 VmaBlockVector* pBlockVector,
7579 uint32_t currentFrameIndex) :
7580 m_hAllocator(hAllocator),
7581 m_pBlockVector(pBlockVector),
7582 m_CurrentFrameIndex(currentFrameIndex)
7585 virtual ~VmaDefragmentationAlgorithm()
7589 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
7590 virtual void AddAll() = 0;
7592 virtual VkResult Defragment(
7593 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7594 VkDeviceSize maxBytesToMove,
7595 uint32_t maxAllocationsToMove,
7598 virtual VkDeviceSize GetBytesMoved()
const = 0;
7599 virtual uint32_t GetAllocationsMoved()
const = 0;
7603 VmaBlockVector*
const m_pBlockVector;
7604 const uint32_t m_CurrentFrameIndex;
7606 struct AllocationInfo
7609 VkBool32* m_pChanged;
7612 m_hAllocation(VK_NULL_HANDLE),
7613 m_pChanged(VMA_NULL)
7617 m_hAllocation(hAlloc),
7618 m_pChanged(pChanged)
7624 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
7626 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
7628 VmaDefragmentationAlgorithm_Generic(
7630 VmaBlockVector* pBlockVector,
7631 uint32_t currentFrameIndex,
7632 bool overlappingMoveSupported);
7633 virtual ~VmaDefragmentationAlgorithm_Generic();
7635 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7636 virtual void AddAll() { m_AllAllocations =
true; }
7638 virtual VkResult Defragment(
7639 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7640 VkDeviceSize maxBytesToMove,
7641 uint32_t maxAllocationsToMove,
7644 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7645 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7648 uint32_t m_AllocationCount;
7649 bool m_AllAllocations;
7651 VkDeviceSize m_BytesMoved;
7652 uint32_t m_AllocationsMoved;
7654 struct AllocationInfoSizeGreater
7656 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7658 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
7662 struct AllocationInfoOffsetGreater
7664 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
7666 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
7672 size_t m_OriginalBlockIndex;
7673 VmaDeviceMemoryBlock* m_pBlock;
7674 bool m_HasNonMovableAllocations;
7675 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
7677 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
7678 m_OriginalBlockIndex(SIZE_MAX),
7680 m_HasNonMovableAllocations(true),
7681 m_Allocations(pAllocationCallbacks)
7685 void CalcHasNonMovableAllocations()
7687 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
7688 const size_t defragmentAllocCount = m_Allocations.size();
7689 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
7692 void SortAllocationsBySizeDescending()
7694 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
7697 void SortAllocationsByOffsetDescending()
7699 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
7703 struct BlockPointerLess
7705 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const
7707 return pLhsBlockInfo->m_pBlock < pRhsBlock;
7709 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7711 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
7717 struct BlockInfoCompareMoveDestination
7719 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
7721 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
7725 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
7729 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
7737 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
7738 BlockInfoVector m_Blocks;
7740 VkResult DefragmentRound(
7741 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7742 VkDeviceSize maxBytesToMove,
7743 uint32_t maxAllocationsToMove,
7744 bool freeOldAllocations);
7746 size_t CalcBlocksWithNonMovableCount()
const;
7748 static bool MoveMakesSense(
7749 size_t dstBlockIndex, VkDeviceSize dstOffset,
7750 size_t srcBlockIndex, VkDeviceSize srcOffset);
7753 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
7755 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
7757 VmaDefragmentationAlgorithm_Fast(
7759 VmaBlockVector* pBlockVector,
7760 uint32_t currentFrameIndex,
7761 bool overlappingMoveSupported);
7762 virtual ~VmaDefragmentationAlgorithm_Fast();
7764 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
7765 virtual void AddAll() { m_AllAllocations =
true; }
7767 virtual VkResult Defragment(
7768 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7769 VkDeviceSize maxBytesToMove,
7770 uint32_t maxAllocationsToMove,
7773 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
7774 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
7779 size_t origBlockIndex;
7782 class FreeSpaceDatabase
7788 s.blockInfoIndex = SIZE_MAX;
7789 for(
size_t i = 0; i < MAX_COUNT; ++i)
7791 m_FreeSpaces[i] = s;
7795 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
7797 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7803 size_t bestIndex = SIZE_MAX;
7804 for(
size_t i = 0; i < MAX_COUNT; ++i)
7807 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
7812 if(m_FreeSpaces[i].size < size &&
7813 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
7819 if(bestIndex != SIZE_MAX)
7821 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
7822 m_FreeSpaces[bestIndex].offset = offset;
7823 m_FreeSpaces[bestIndex].size = size;
7827 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
7828 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
7830 size_t bestIndex = SIZE_MAX;
7831 VkDeviceSize bestFreeSpaceAfter = 0;
7832 for(
size_t i = 0; i < MAX_COUNT; ++i)
7835 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
7837 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
7839 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
7841 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
7843 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
7846 bestFreeSpaceAfter = freeSpaceAfter;
7852 if(bestIndex != SIZE_MAX)
7854 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
7855 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
7857 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7860 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
7861 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
7862 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
7867 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
7877 static const size_t MAX_COUNT = 4;
7881 size_t blockInfoIndex;
7882 VkDeviceSize offset;
7884 } m_FreeSpaces[MAX_COUNT];
7887 const bool m_OverlappingMoveSupported;
7889 uint32_t m_AllocationCount;
7890 bool m_AllAllocations;
7892 VkDeviceSize m_BytesMoved;
7893 uint32_t m_AllocationsMoved;
7895 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
7897 void PreprocessMetadata();
7898 void PostprocessMetadata();
7899 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
7902 struct VmaBlockDefragmentationContext
7906 BLOCK_FLAG_USED = 0x00000001,
7912 class VmaBlockVectorDefragmentationContext
7914 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
7918 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
7919 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
7920 uint32_t defragmentationMovesProcessed;
7921 uint32_t defragmentationMovesCommitted;
7922 bool hasDefragmentationPlan;
7924 VmaBlockVectorDefragmentationContext(
7927 VmaBlockVector* pBlockVector,
7928 uint32_t currFrameIndex);
7929 ~VmaBlockVectorDefragmentationContext();
7931 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
7932 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
7933 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
7935 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
7936 void AddAll() { m_AllAllocations =
true; }
7945 VmaBlockVector*
const m_pBlockVector;
7946 const uint32_t m_CurrFrameIndex;
7948 VmaDefragmentationAlgorithm* m_pAlgorithm;
7956 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
7957 bool m_AllAllocations;
7960 struct VmaDefragmentationContext_T
7963 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
7965 VmaDefragmentationContext_T(
7967 uint32_t currFrameIndex,
7970 ~VmaDefragmentationContext_T();
7972 void AddPools(uint32_t poolCount,
const VmaPool* pPools);
7973 void AddAllocations(
7974 uint32_t allocationCount,
7976 VkBool32* pAllocationsChanged);
7984 VkResult Defragment(
7985 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
7986 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
7990 VkResult DefragmentPassEnd();
7994 const uint32_t m_CurrFrameIndex;
7995 const uint32_t m_Flags;
7998 VkDeviceSize m_MaxCpuBytesToMove;
7999 uint32_t m_MaxCpuAllocationsToMove;
8000 VkDeviceSize m_MaxGpuBytesToMove;
8001 uint32_t m_MaxGpuAllocationsToMove;
8004 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
8006 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
8009 #if VMA_RECORDING_ENABLED
8016 void WriteConfiguration(
8017 const VkPhysicalDeviceProperties& devProps,
8018 const VkPhysicalDeviceMemoryProperties& memProps,
8019 uint32_t vulkanApiVersion,
8020 bool dedicatedAllocationExtensionEnabled,
8021 bool bindMemory2ExtensionEnabled,
8022 bool memoryBudgetExtensionEnabled,
8023 bool deviceCoherentMemoryExtensionEnabled);
8026 void RecordCreateAllocator(uint32_t frameIndex);
8027 void RecordDestroyAllocator(uint32_t frameIndex);
8028 void RecordCreatePool(uint32_t frameIndex,
8031 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
8032 void RecordAllocateMemory(uint32_t frameIndex,
8033 const VkMemoryRequirements& vkMemReq,
8036 void RecordAllocateMemoryPages(uint32_t frameIndex,
8037 const VkMemoryRequirements& vkMemReq,
8039 uint64_t allocationCount,
8041 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
8042 const VkMemoryRequirements& vkMemReq,
8043 bool requiresDedicatedAllocation,
8044 bool prefersDedicatedAllocation,
8047 void RecordAllocateMemoryForImage(uint32_t frameIndex,
8048 const VkMemoryRequirements& vkMemReq,
8049 bool requiresDedicatedAllocation,
8050 bool prefersDedicatedAllocation,
8053 void RecordFreeMemory(uint32_t frameIndex,
8055 void RecordFreeMemoryPages(uint32_t frameIndex,
8056 uint64_t allocationCount,
8058 void RecordSetAllocationUserData(uint32_t frameIndex,
8060 const void* pUserData);
8061 void RecordCreateLostAllocation(uint32_t frameIndex,
8063 void RecordMapMemory(uint32_t frameIndex,
8065 void RecordUnmapMemory(uint32_t frameIndex,
8067 void RecordFlushAllocation(uint32_t frameIndex,
8068 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
8069 void RecordInvalidateAllocation(uint32_t frameIndex,
8070 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
8071 void RecordCreateBuffer(uint32_t frameIndex,
8072 const VkBufferCreateInfo& bufCreateInfo,
8075 void RecordCreateImage(uint32_t frameIndex,
8076 const VkImageCreateInfo& imageCreateInfo,
8079 void RecordDestroyBuffer(uint32_t frameIndex,
8081 void RecordDestroyImage(uint32_t frameIndex,
8083 void RecordTouchAllocation(uint32_t frameIndex,
8085 void RecordGetAllocationInfo(uint32_t frameIndex,
8087 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
8089 void RecordDefragmentationBegin(uint32_t frameIndex,
8092 void RecordDefragmentationEnd(uint32_t frameIndex,
8094 void RecordSetPoolName(uint32_t frameIndex,
8105 class UserDataString
8109 const char* GetString()
const {
return m_Str; }
8119 VMA_MUTEX m_FileMutex;
8120 std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;
8122 void GetBasicParams(CallParams& outParams);
8125 template<
typename T>
8126 void PrintPointerList(uint64_t count,
const T* pItems)
8130 fprintf(m_File,
"%p", pItems[0]);
8131 for(uint64_t i = 1; i < count; ++i)
8133 fprintf(m_File,
" %p", pItems[i]);
8138 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
8147 class VmaAllocationObjectAllocator
8149 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
8151 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
8153 template<
typename... Types>
VmaAllocation Allocate(Types... args);
8158 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
8161 struct VmaCurrentBudgetData
8163 VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
8164 VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
8166 #if VMA_MEMORY_BUDGET
8167 VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
8168 VMA_RW_MUTEX m_BudgetMutex;
8169 uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
8170 uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
8171 uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
8174 VmaCurrentBudgetData()
8176 for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
8178 m_BlockBytes[heapIndex] = 0;
8179 m_AllocationBytes[heapIndex] = 0;
8180 #if VMA_MEMORY_BUDGET
8181 m_VulkanUsage[heapIndex] = 0;
8182 m_VulkanBudget[heapIndex] = 0;
8183 m_BlockBytesAtBudgetFetch[heapIndex] = 0;
8187 #if VMA_MEMORY_BUDGET
8188 m_OperationsSinceBudgetFetch = 0;
8192 void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
8194 m_AllocationBytes[heapIndex] += allocationSize;
8195 #if VMA_MEMORY_BUDGET
8196 ++m_OperationsSinceBudgetFetch;
8200 void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
8202 VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
8203 m_AllocationBytes[heapIndex] -= allocationSize;
8204 #if VMA_MEMORY_BUDGET
8205 ++m_OperationsSinceBudgetFetch;
8211 struct VmaAllocator_T
8213 VMA_CLASS_NO_COPY(VmaAllocator_T)
8216 uint32_t m_VulkanApiVersion;
8217 bool m_UseKhrDedicatedAllocation;
8218 bool m_UseKhrBindMemory2;
8219 bool m_UseExtMemoryBudget;
8220 bool m_UseAmdDeviceCoherentMemory;
8221 bool m_UseKhrBufferDeviceAddress;
8222 bool m_UseExtMemoryPriority;
8224 VkInstance m_hInstance;
8225 bool m_AllocationCallbacksSpecified;
8226 VkAllocationCallbacks m_AllocationCallbacks;
8228 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
8231 uint32_t m_HeapSizeLimitMask;
8233 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
8234 VkPhysicalDeviceMemoryProperties m_MemProps;
8237 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
8239 typedef VmaIntrusiveLinkedList<VmaDedicatedAllocationListItemTraits> DedicatedAllocationLinkedList;
8240 DedicatedAllocationLinkedList m_DedicatedAllocations[VK_MAX_MEMORY_TYPES];
8241 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
8243 VmaCurrentBudgetData m_Budget;
8244 VMA_ATOMIC_UINT32 m_DeviceMemoryCount;
8250 const VkAllocationCallbacks* GetAllocationCallbacks()
const
8252 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
8256 return m_VulkanFunctions;
8259 VkPhysicalDevice GetPhysicalDevice()
const {
return m_PhysicalDevice; }
8261 VkDeviceSize GetBufferImageGranularity()
const
8264 static_cast<VkDeviceSize
>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
8265 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
// NOTE(review): partial interior of class VmaAllocator_T. The extract is
// line-wrapped and has lines elided (original line numbers are embedded in the
// text). Code kept byte-identical; comments only.
// Counts cached from the physical device's VkPhysicalDeviceMemoryProperties.
8268 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
8269 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
// Maps a memory type index to the index of the heap it lives in.
8271 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const
8273 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
8274 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// True when the type is HOST_VISIBLE but NOT HOST_COHERENT, i.e. mapped
// writes/reads need explicit flush/invalidate.
8277 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const
8279 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
8280 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Minimum alignment for suballocations of this type: nonCoherentAtomSize for
// non-coherent types (so flush ranges can be aligned), else VMA_MIN_ALIGNMENT.
8283 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const
8285 return IsMemoryTypeNonCoherent(memTypeIndex) ?
8286 VMA_MAX((VkDeviceSize)VMA_MIN_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
8287 (VkDeviceSize)VMA_MIN_ALIGNMENT;
8290 bool IsIntegratedGpu()
const
8292 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
// Bitmask of memory types usable by this allocator instance.
8295 uint32_t GetGlobalMemoryTypeBits()
const {
return m_GlobalMemoryTypeBits; }
8297 #if VMA_RECORDING_ENABLED
8298 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
// Declarations of the allocator's main operations; definitions are elsewhere
// in the file. Several parameter lines are elided in this extract.
8301 void GetBufferMemoryRequirements(
8303 VkMemoryRequirements& memReq,
8304 bool& requiresDedicatedAllocation,
8305 bool& prefersDedicatedAllocation)
const;
8306 void GetImageMemoryRequirements(
8308 VkMemoryRequirements& memReq,
8309 bool& requiresDedicatedAllocation,
8310 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point; can produce multiple allocations at once.
8313 VkResult AllocateMemory(
8314 const VkMemoryRequirements& vkMemReq,
8315 bool requiresDedicatedAllocation,
8316 bool prefersDedicatedAllocation,
8317 VkBuffer dedicatedBuffer,
8318 VkBufferUsageFlags dedicatedBufferUsage,
8319 VkImage dedicatedImage,
8321 VmaSuballocationType suballocType,
8322 size_t allocationCount,
8327 size_t allocationCount,
8330 void CalculateStats(
VmaStats* pStats);
8333 VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);
8335 #if VMA_STATS_STRING_ENABLED
8336 void PrintDetailedMap(
class VmaJsonWriter& json);
// Defragmentation session lifecycle (begin/end and per-pass begin/end).
8339 VkResult DefragmentationBegin(
8343 VkResult DefragmentationEnd(
8346 VkResult DefragmentationPassBegin(
8349 VkResult DefragmentationPassEnd(
8356 void DestroyPool(
VmaPool pool);
// Frame index used by the lost-allocation mechanism; stored atomically.
8359 void SetCurrentFrameIndex(uint32_t frameIndex);
8360 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
8362 void MakePoolAllocationsLost(
8364 size_t* pLostAllocationCount);
8365 VkResult CheckPoolCorruption(
VmaPool hPool);
8366 VkResult CheckCorruption(uint32_t memoryTypeBits);
// Thin wrappers over vkAllocateMemory/vkFreeMemory and the bind functions.
8371 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
8373 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
8375 VkResult BindVulkanBuffer(
8376 VkDeviceMemory memory,
8377 VkDeviceSize memoryOffset,
8381 VkResult BindVulkanImage(
8382 VkDeviceMemory memory,
8383 VkDeviceSize memoryOffset,
8390 VkResult BindBufferMemory(
8392 VkDeviceSize allocationLocalOffset,
8395 VkResult BindImageMemory(
8397 VkDeviceSize allocationLocalOffset,
// Flush or invalidate mapped ranges of one or many allocations.
8401 VkResult FlushOrInvalidateAllocation(
8403 VkDeviceSize offset, VkDeviceSize size,
8404 VMA_CACHE_OPERATION op);
8405 VkResult FlushOrInvalidateAllocations(
8406 uint32_t allocationCount,
8408 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
8409 VMA_CACHE_OPERATION op);
8411 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
8417 uint32_t GetGpuDefragmentationMemoryTypeBits();
8419 #if VMA_EXTERNAL_MEMORY
8420 VkExternalMemoryHandleTypeFlagsKHR GetExternalMemoryHandleTypeFlags(uint32_t memTypeIndex)
const
8422 return m_TypeExternalMemoryHandleTypes[memTypeIndex];
// --- Data members (several lines elided in this extract) ---
8427 VkDeviceSize m_PreferredLargeHeapBlockSize;
8429 VkPhysicalDevice m_PhysicalDevice;
8430 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
8431 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
8432 #if VMA_EXTERNAL_MEMORY
8433 VkExternalMemoryHandleTypeFlagsKHR m_TypeExternalMemoryHandleTypes[VK_MAX_MEMORY_TYPES];
8436 VMA_RW_MUTEX m_PoolsMutex;
8437 typedef VmaIntrusiveLinkedList<VmaPoolListItemTraits> PoolList;
8440 uint32_t m_NextPoolId;
8445 uint32_t m_GlobalMemoryTypeBits;
8447 #if VMA_RECORDING_ENABLED
8448 VmaRecorder* m_pRecorder;
// --- Private helpers ---
8453 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
8454 void ImportVulkanFunctions_Static();
8459 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
8460 void ImportVulkanFunctions_Dynamic();
8463 void ValidateVulkanFunctions();
8465 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
8467 VkResult AllocateMemoryOfType(
8469 VkDeviceSize alignment,
8470 bool dedicatedAllocation,
8471 VkBuffer dedicatedBuffer,
8472 VkBufferUsageFlags dedicatedBufferUsage,
8473 VkImage dedicatedImage,
8475 uint32_t memTypeIndex,
8476 VmaSuballocationType suballocType,
8477 size_t allocationCount,
8481 VkResult AllocateDedicatedMemoryPage(
8483 VmaSuballocationType suballocType,
8484 uint32_t memTypeIndex,
8485 const VkMemoryAllocateInfo& allocInfo,
8487 bool isUserDataString,
8492 VkResult AllocateDedicatedMemory(
8494 VmaSuballocationType suballocType,
8495 uint32_t memTypeIndex,
8498 bool isUserDataString,
8501 VkBuffer dedicatedBuffer,
8502 VkBufferUsageFlags dedicatedBufferUsage,
8503 VkImage dedicatedImage,
8504 size_t allocationCount,
8513 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
8515 uint32_t CalculateGlobalMemoryTypeBits()
const;
8517 bool GetFlushOrInvalidateRange(
8519 VkDeviceSize offset, VkDeviceSize size,
8520 VkMappedMemoryRange& outRange)
const;
8522 #if VMA_MEMORY_BUDGET
8523 void UpdateVulkanBudget();
// NOTE(review): file-scope allocation helpers. Extract is line-wrapped with
// braces and some header lines elided; code kept byte-identical.
// Forwards to the VkAllocationCallbacks-based VmaMalloc using the callbacks
// stored in the allocator.
8530 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
8532 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
// Counterpart of VmaMalloc above: frees through the allocator's callbacks.
8535 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
8537 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation (the function header line is elided here;
// presumably vma_new<T> — TODO confirm against the full file).
8540 template<
typename T>
8543 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Typed array allocation of `count` elements of T.
8546 template<
typename T>
8547 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
8549 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroys one object then frees it (destructor call line elided in extract).
8552 template<
typename T>
8553 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
8558 VmaFree(hAllocator, ptr);
// Destroys `count` elements in reverse order then frees the array.
8562 template<
typename T>
8563 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
8567 for(
size_t i = count; i--; )
8569 VmaFree(hAllocator, ptr);
8576 #if VMA_STATS_STRING_ENABLED
// Minimal append-only string builder backed by a VmaVector<char>, using the
// allocator's allocation callbacks. The stored data is NOT NUL-terminated
// (length comes from GetLength()). Extract has braces/access specifiers elided.
8578 class VmaStringBuilder
8581 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
8582 size_t GetLength()
const {
return m_Data.size(); }
8583 const char* GetData()
const {
return m_Data.data(); }
// Append a single character / a C string / a newline / formatted numbers.
8585 void Add(
char ch) { m_Data.push_back(ch); }
8586 void Add(
const char* pStr);
8587 void AddNewLine() { Add(
'\n'); }
8588 void AddNumber(uint32_t num);
8589 void AddNumber(uint64_t num);
8590 void AddPointer(
const void* ptr);
8593 VmaVector< char, VmaStlAllocator<char> > m_Data;
// VmaStringBuilder out-of-line definitions. Extract has braces and a few lines
// elided (e.g. a guard around the copy, the digit-formatting loops).
// Appends pStr by growing the vector and memcpy-ing the bytes (no terminator).
8596 void VmaStringBuilder::Add(
const char* pStr)
8598 const size_t strLen = strlen(pStr);
8601 const size_t oldCount = m_Data.size();
8602 m_Data.resize(oldCount + strLen);
8603 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Formats a uint32_t in decimal by writing digits backwards into a local
// buffer (buffer declaration and loop elided in this extract).
8607 void VmaStringBuilder::AddNumber(uint32_t num)
8614 *--p =
'0' + (num % 10);
// Same as above for uint64_t.
8621 void VmaStringBuilder::AddNumber(uint64_t num)
8628 *--p =
'0' + (num % 10);
// Formats a pointer via VmaPtrToStr into a local buffer, then appends it.
8635 void VmaStringBuilder::AddPointer(
const void* ptr)
8638 VmaPtrToStr(buf,
sizeof(buf), ptr);
8647 #if VMA_STATS_STRING_ENABLED
// Streaming JSON writer used to build the stats string. Maintains a stack of
// open collections (object/array) to track nesting, value counts (objects
// alternate key/value), and per-collection single-line mode. Class header
// line and braces are elided in this extract.
8651 VMA_CLASS_NO_COPY(VmaJsonWriter)
8653 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// Open/close a JSON object or array; singleLine suppresses indentation.
8656 void BeginObject(
bool singleLine =
false);
8659 void BeginArray(
bool singleLine =
false);
// WriteString emits a complete quoted string; Begin/Continue/EndString allow
// building one string from several pieces.
8662 void WriteString(
const char* pStr);
8663 void BeginString(
const char* pStr = VMA_NULL);
8664 void ContinueString(
const char* pStr);
8665 void ContinueString(uint32_t n);
8666 void ContinueString(uint64_t n);
8667 void ContinueString_Pointer(
const void* ptr);
8668 void EndString(
const char* pStr = VMA_NULL);
8670 void WriteNumber(uint32_t n);
8671 void WriteNumber(uint64_t n);
8672 void WriteBool(
bool b);
// Indentation unit used by WriteIndent.
8676 static const char*
const INDENT;
8678 enum COLLECTION_TYPE
8680 COLLECTION_TYPE_OBJECT,
8681 COLLECTION_TYPE_ARRAY,
// One entry per currently-open collection.
8685 COLLECTION_TYPE type;
8686 uint32_t valueCount;
8687 bool singleLineMode;
8690 VmaStringBuilder& m_SB;
8691 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
8692 bool m_InsideString;
// Internal: emit separators/indent before a value; isString distinguishes
// object keys from other values.
8694 void BeginValue(
bool isString);
8695 void WriteIndent(
bool oneLess =
false);
// Two-space indentation per nesting level.
8698 const char*
const VmaJsonWriter::INDENT =
"  ";
// VmaJsonWriter definitions. Extract is line-wrapped with braces and several
// statement lines elided (e.g. the character-escaping switch in
// ContinueString); code kept byte-identical, comments only.
// Ctor: stack uses the given callbacks; starts outside any string. (The
// m_SB initializer line appears elided in this extract.)
8700 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
8702 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
8703 m_InsideString(false)
// Dtor asserts the document was fully closed: no open string, no open
// collections.
8707 VmaJsonWriter::~VmaJsonWriter()
8709 VMA_ASSERT(!m_InsideString);
8710 VMA_ASSERT(m_Stack.empty());
// Pushes an OBJECT entry onto the stack ('{' emission elided in extract).
8713 void VmaJsonWriter::BeginObject(
bool singleLine)
8715 VMA_ASSERT(!m_InsideString);
8721 item.type = COLLECTION_TYPE_OBJECT;
8722 item.valueCount = 0;
8723 item.singleLineMode = singleLine;
8724 m_Stack.push_back(item);
// Pops the matching OBJECT entry; asserts the top of stack is an object.
8727 void VmaJsonWriter::EndObject()
8729 VMA_ASSERT(!m_InsideString);
8734 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Pushes an ARRAY entry onto the stack ('[' emission elided in extract).
8738 void VmaJsonWriter::BeginArray(
bool singleLine)
8740 VMA_ASSERT(!m_InsideString);
8746 item.type = COLLECTION_TYPE_ARRAY;
8747 item.valueCount = 0;
8748 item.singleLineMode = singleLine;
8749 m_Stack.push_back(item);
// Pops the matching ARRAY entry.
8752 void VmaJsonWriter::EndArray()
8754 VMA_ASSERT(!m_InsideString);
8759 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Emits a complete quoted string (body elided; presumably Begin+EndString).
8763 void VmaJsonWriter::WriteString(
const char* pStr)
// Opens a quoted string and optionally writes an initial fragment.
8769 void VmaJsonWriter::BeginString(
const char* pStr)
8771 VMA_ASSERT(!m_InsideString);
8775 m_InsideString =
true;
8776 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8778 ContinueString(pStr);
// Appends a fragment inside an open string; the per-character escaping
// switch (original lines 8788-8819) is elided in this extract; unsupported
// characters hit the assert below.
8782 void VmaJsonWriter::ContinueString(
const char* pStr)
8784 VMA_ASSERT(m_InsideString);
8786 const size_t strLen = strlen(pStr);
8787 for(
size_t i = 0; i < strLen; ++i)
8820 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric fragments inside an open string (digit emission lines elided).
8826 void VmaJsonWriter::ContinueString(uint32_t n)
8828 VMA_ASSERT(m_InsideString);
8832 void VmaJsonWriter::ContinueString(uint64_t n)
8834 VMA_ASSERT(m_InsideString);
// Appends a formatted pointer inside an open string.
8838 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
8840 VMA_ASSERT(m_InsideString);
8841 m_SB.AddPointer(ptr);
// Optionally appends a final fragment, then closes the quoted string.
8844 void VmaJsonWriter::EndString(
const char* pStr)
8846 VMA_ASSERT(m_InsideString);
8847 if(pStr != VMA_NULL && pStr[0] !=
'\0')
8849 ContinueString(pStr);
8852 m_InsideString =
false;
// Bare numeric values (emission lines elided in extract).
8855 void VmaJsonWriter::WriteNumber(uint32_t n)
8857 VMA_ASSERT(!m_InsideString);
8862 void VmaJsonWriter::WriteNumber(uint64_t n)
8864 VMA_ASSERT(!m_InsideString);
// Emits the literal "true"/"false".
8869 void VmaJsonWriter::WriteBool(
bool b)
8871 VMA_ASSERT(!m_InsideString);
8873 m_SB.Add(b ?
"true" :
"false");
// Emits the literal "null" (emission line elided in extract).
8876 void VmaJsonWriter::WriteNull()
8878 VMA_ASSERT(!m_InsideString);
// Before each value: enforce that object entries at even positions are keys,
// emit ':' after keys / ',' between values (separator emission elided), and
// bump the current collection's value count.
8883 void VmaJsonWriter::BeginValue(
bool isString)
8885 if(!m_Stack.empty())
8887 StackItem& currItem = m_Stack.back();
8888 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8889 currItem.valueCount % 2 == 0)
8891 VMA_ASSERT(isString);
8894 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8895 currItem.valueCount % 2 != 0)
8899 else if(currItem.valueCount > 0)
8908 ++currItem.valueCount;
// Newline + one INDENT per nesting level (oneLess reduces depth by one, used
// when closing a collection). Skipped in single-line mode.
8912 void VmaJsonWriter::WriteIndent(
bool oneLess)
8914 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
8918 size_t count = m_Stack.size();
8919 if(count > 0 && oneLess)
8923 for(
size_t i = 0; i < count; ++i)
// Sets the allocation's user data. In string mode (IsUserDataString()) the
// previous string copy is freed and the new one is duplicated through the
// allocator's callbacks; otherwise the raw pointer is stored as-is.
// Extract has braces/else lines elided; code kept byte-identical.
8934 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
8936 if(IsUserDataString())
// Guard against being handed the pointer we are about to free.
8938 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
8940 FreeUserDataString(hAllocator);
8942 if(pUserData != VMA_NULL)
8944 m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (
const char*)pUserData);
8949 m_pUserData = pUserData;
// Moves a block-suballocated allocation to another block/offset (used by
// defragmentation). If persistently mapped, the mapping is migrated:
// unmap the old block, map the new one with the same ref count.
// (The hAllocator parameter line appears elided in this extract.)
8953 void VmaAllocation_T::ChangeBlockAllocation(
8955 VmaDeviceMemoryBlock* block,
8956 VkDeviceSize offset)
8958 VMA_ASSERT(block != VMA_NULL);
8959 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8962 if(block != m_BlockAllocation.m_Block)
8964 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
8965 if(IsPersistentMap())
8967 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
8968 block->Map(hAllocator, mapRefCount, VMA_NULL);
8971 m_BlockAllocation.m_Block = block;
8972 m_BlockAllocation.m_Offset = offset;
8975 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
8977 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8978 m_BlockAllocation.m_Offset = newOffset;
// VmaAllocation_T accessors dispatching on m_Type (switch headers, default
// cases and braces are elided in this extract; code kept byte-identical).
// Offset of the allocation within its device memory.
8981 VkDeviceSize VmaAllocation_T::GetOffset()
const
8985 case ALLOCATION_TYPE_BLOCK:
8986 return m_BlockAllocation.m_Offset;
8987 case ALLOCATION_TYPE_DEDICATED:
// The VkDeviceMemory backing this allocation: the owning block's memory for
// block suballocations, the dedicated handle otherwise.
8995 VkDeviceMemory VmaAllocation_T::GetMemory()
const
8999 case ALLOCATION_TYPE_BLOCK:
9000 return m_BlockAllocation.m_Block->GetDeviceMemory();
9001 case ALLOCATION_TYPE_DEDICATED:
9002 return m_DedicatedAllocation.m_hMemory;
9005 return VK_NULL_HANDLE;
// Host pointer to the mapped data, adjusted by the in-block offset for block
// suballocations; dedicated allocations return their own mapped pointer.
9009 void* VmaAllocation_T::GetMappedData()
const
9013 case ALLOCATION_TYPE_BLOCK:
9016 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
9017 VMA_ASSERT(pBlockData != VMA_NULL);
9018 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
9025 case ALLOCATION_TYPE_DEDICATED:
// Mapped pointer must be non-null exactly when the map count is non-zero.
9026 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
9027 return m_DedicatedAllocation.m_pMappedData;
// Whether the lost-allocation mechanism applies (block allocations only).
9034 bool VmaAllocation_T::CanBecomeLost()
const
9038 case ALLOCATION_TYPE_BLOCK:
9039 return m_BlockAllocation.m_CanBecomeLost;
9040 case ALLOCATION_TYPE_DEDICATED:
// Tries to mark the allocation lost if it has not been used within
// frameInUseCount frames. The CAS retry loop and return statements are
// elided in this extract.
9048 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9050 VMA_ASSERT(CanBecomeLost());
9056 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
9059 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
9064 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
9070 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
9080 #if VMA_STATS_STRING_ENABLED
// Human-readable names indexed by VmaSuballocationType (initializer contents
// elided in this extract).
9083 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Writes this allocation's key/value pairs into an already-open JSON object:
// Type, Size, optional UserData (quoted string or pointer), creation and
// last-use frame indices, and Usage when a buffer/image usage was recorded.
9092 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const
9094 json.WriteString(
"Type");
9095 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
9097 json.WriteString(
"Size");
9098 json.WriteNumber(m_Size);
9100 if(m_pUserData != VMA_NULL)
9102 json.WriteString(
"UserData");
9103 if(IsUserDataString())
9105 json.WriteString((
const char*)m_pUserData);
9110 json.ContinueString_Pointer(m_pUserData);
9115 json.WriteString(
"CreationFrameIndex");
9116 json.WriteNumber(m_CreationFrameIndex);
9118 json.WriteString(
"LastUseFrameIndex");
9119 json.WriteNumber(GetLastUseFrameIndex());
9121 if(m_BufferImageUsage != 0)
9123 json.WriteString(
"Usage");
9124 json.WriteNumber(m_BufferImageUsage);
// Frees the duplicated user-data string and clears the pointer.
9130 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
9132 VMA_ASSERT(IsUserDataString());
9133 VmaFreeString(hAllocator->GetAllocationCallbacks(), (
char*)m_pUserData);
9134 m_pUserData = VMA_NULL;
// Bumps the map ref count for a block allocation; the count lives in the low
// 7 bits of m_MapCount (0x7F), high bit flags persistent mapping. Increment
// line elided in extract.
9137 void VmaAllocation_T::BlockAllocMap()
9139 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
9141 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
9147 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the map ref count; asserts if not currently mapped.
9151 void VmaAllocation_T::BlockAllocUnmap()
9153 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
9155 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
9161 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation: if already mapped, returns the cached pointer
// (ref-counted); otherwise calls vkMapMemory and caches the result on
// success. Returns VK_ERROR_MEMORY_MAP_FAILED when the ref count saturates.
9165 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
9167 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
9171 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
9173 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
9174 *ppData = m_DedicatedAllocation.m_pMappedData;
9180 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
9181 return VK_ERROR_MEMORY_MAP_FAILED;
9186 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9187 hAllocator->m_hDevice,
9188 m_DedicatedAllocation.m_hMemory,
9193 if(result == VK_SUCCESS)
9195 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation when its ref count reaches zero, calling
// vkUnmapMemory and clearing the cached pointer.
9202 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
9204 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
9206 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
9211 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
9212 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
9213 hAllocator->m_hDevice,
9214 m_DedicatedAllocation.m_hMemory);
9219 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
9223 #if VMA_STATS_STRING_ENABLED
// Serializes one VmaStatInfo into the JSON writer: counters, byte totals, and
// Min/Avg/Max sub-objects for allocation and unused-range sizes. The value
// emission lines (json.WriteNumber(stat....)) are elided in this extract.
9225 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
9229 json.WriteString(
"Blocks");
9232 json.WriteString(
"Allocations");
9235 json.WriteString(
"UnusedRanges");
9238 json.WriteString(
"UsedBytes");
9241 json.WriteString(
"UnusedBytes");
// "AllocationSize": { "Min", "Avg", "Max" } — written as a single line.
9246 json.WriteString(
"AllocationSize");
9247 json.BeginObject(
true);
9248 json.WriteString(
"Min");
9250 json.WriteString(
"Avg");
9252 json.WriteString(
"Max");
// "UnusedRangeSize": { "Min", "Avg", "Max" } — same single-line shape.
9259 json.WriteString(
"UnusedRangeSize");
9260 json.BeginObject(
true);
9261 json.WriteString(
"Min");
9263 json.WriteString(
"Avg");
9265 json.WriteString(
"Max");
// Comparator ordering suballocation-list iterators by suballocation size;
// the second overload compares an iterator directly against a size, enabling
// binary search by size. operator() header lines elided in this extract.
9275 struct VmaSuballocationItemSizeLess
9278 const VmaSuballocationList::iterator lhs,
9279 const VmaSuballocationList::iterator rhs)
const
9281 return lhs->size < rhs->size;
9284 const VmaSuballocationList::iterator lhs,
9285 VkDeviceSize rhsSize)
const
9287 return lhs->size < rhsSize;
// Base-class ctor: caches the allocation callbacks (other initializers may be
// elided in this extract).
9295 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
9297 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
9301 #if VMA_STATS_STRING_ENABLED
// Opens the per-block JSON object: totals, counts, then the "Suballocations"
// array that PrintDetailedMap_Allocation/_UnusedRange entries go into.
9303 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
9304 VkDeviceSize unusedBytes,
9305 size_t allocationCount,
9306 size_t unusedRangeCount)
const
9310 json.WriteString(
"TotalBytes");
9311 json.WriteNumber(GetSize());
9313 json.WriteString(
"UnusedBytes");
9314 json.WriteNumber(unusedBytes);
9316 json.WriteString(
"Allocations");
9317 json.WriteNumber((uint64_t)allocationCount);
9319 json.WriteString(
"UnusedRanges");
9320 json.WriteNumber((uint64_t)unusedRangeCount);
9322 json.WriteString(
"Suballocations");
// One single-line object per live allocation: Offset plus the allocation's
// own parameters (the hAllocation parameter line is elided in this extract).
9326 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
9327 VkDeviceSize offset,
9330 json.BeginObject(
true);
9332 json.WriteString(
"Offset");
9333 json.WriteNumber(offset);
9335 hAllocation->PrintParameters(json);
// One single-line object per free range: Offset, Type ("FREE"), Size.
9340 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
9341 VkDeviceSize offset,
9342 VkDeviceSize size)
const
9344 json.BeginObject(
true);
9346 json.WriteString(
"Offset");
9347 json.WriteNumber(offset);
9349 json.WriteString(
"Type");
9350 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
9352 json.WriteString(
"Size");
9353 json.WriteNumber(size);
// Closes the array/object opened by PrintDetailedMap_Begin (body elided).
9358 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const
// Generic (list-based) metadata: a linked list of suballocations plus a
// vector of iterators to free suballocations sorted by size. Some member
// initializers are elided in this extract.
9369 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
9370 VmaBlockMetadata(hAllocator),
9373 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9374 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
9378 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initializes the block as one single free suballocation covering the whole
// size and registers it in the by-size list.
9382 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
9384 VmaBlockMetadata::Init(size);
9387 m_SumFreeSize = size;
9389 VmaSuballocation suballoc = {};
9390 suballoc.offset = 0;
9391 suballoc.size = size;
9392 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9393 suballoc.hAllocation = VK_NULL_HANDLE;
9395 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9396 m_Suballocations.push_back(suballoc);
9397 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
9399 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full invariant check: contiguous offsets, no two adjacent free ranges,
// free/allocated consistency, and the by-size list sorted ascending with only
// ranges >= the registration threshold. Returns false via VMA_VALIDATE on
// the first violation.
9402 bool VmaBlockMetadata_Generic::Validate()
const
9404 VMA_VALIDATE(!m_Suballocations.empty());
// Running totals recomputed from scratch and compared to cached members.
9407 VkDeviceSize calculatedOffset = 0;
9409 uint32_t calculatedFreeCount = 0;
9411 VkDeviceSize calculatedSumFreeSize = 0;
9414 size_t freeSuballocationsToRegister = 0;
9416 bool prevFree =
false;
9418 for(
const auto& subAlloc : m_Suballocations)
9421 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
9423 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two free ranges must never be adjacent (they should have been merged).
9425 VMA_VALIDATE(!prevFree || !currFree);
9427 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
9431 calculatedSumFreeSize += subAlloc.size;
9432 ++calculatedFreeCount;
9433 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9435 ++freeSuballocationsToRegister;
9439 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
// Allocated entries must agree with their VmaAllocation's view.
9443 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
9444 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
9447 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
9450 calculatedOffset += subAlloc.size;
9451 prevFree = currFree;
9456 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
9458 VkDeviceSize lastSize = 0;
9459 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
9461 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
9464 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
// Sorted ascending by size.
9466 VMA_VALIDATE(suballocItem->size >= lastSize);
9468 lastSize = suballocItem->size;
9472 VMA_VALIDATE(ValidateFreeSuballocationList());
9473 VMA_VALIDATE(calculatedOffset == GetSize());
9474 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
9475 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range: the by-size list is sorted ascending, so it is the last
// element. (The else-branch returning 0 for an empty list is elided in this
// extract.)
9480 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const
9482 if(!m_FreeSuballocationsBySize.empty())
9484 return m_FreeSuballocationsBySize.back()->size;
9492 bool VmaBlockMetadata_Generic::IsEmpty()
const
9494 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo from this block's suballocation list (the per-entry
// accumulation statements are elided in this extract).
9497 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
9501 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9513 for(
const auto& suballoc : m_Suballocations)
9515 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into an existing VmaPoolStats (remaining
// accumulation lines elided in this extract).
9528 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const
9530 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9532 inoutStats.
size += GetSize();
9539 #if VMA_STATS_STRING_ENABLED
// Emits the detailed JSON map: header via PrintDetailedMap_Begin, then one
// entry per suballocation (free range or allocation), then the footer.
9541 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const
9543 PrintDetailedMap_Begin(json,
// Allocation count = total suballocations minus free ones.
9545 m_Suballocations.size() - (
size_t)m_FreeCount,
9549 for(
const auto& suballoc : m_Suballocations)
9551 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
9553 PrintDetailedMap_UnusedRange(json, suballoc.offset, suballoc.size);
9557 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9561 PrintDetailedMap_End(json);
// Searches this block for a place to put a new allocation. Strategy-dependent:
// best-fit via binary search on the by-size list, MIN_OFFSET via linear scan
// from the front, or worst-fit scanning the by-size list backwards. If
// canMakeOtherLost, also considers positions that would require making other
// allocations lost, picking the cheapest by CalcCost(). Returns true and fills
// *pAllocationRequest on success. Several parameter/brace/return lines are
// elided in this extract; code kept byte-identical.
9566 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
9567 uint32_t currentFrameIndex,
9568 uint32_t frameInUseCount,
9569 VkDeviceSize bufferImageGranularity,
9570 VkDeviceSize allocSize,
9571 VkDeviceSize allocAlignment,
9573 VmaSuballocationType allocType,
9574 bool canMakeOtherLost,
9576 VmaAllocationRequest* pAllocationRequest)
9578 VMA_ASSERT(allocSize > 0);
9579 VMA_ASSERT(!upperAddress);
9580 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9581 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9582 VMA_HEAVY_ASSERT(Validate());
9584 pAllocationRequest->type = VmaAllocationRequestType::Normal;
// Early out: without the lost mechanism the request can never fit if total
// free space (incl. debug margins) is too small.
9587 if(canMakeOtherLost ==
false &&
9588 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
9594 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
9595 if(freeSuballocCount > 0)
// Best-fit: binary-search the sorted-by-size list for the first free range
// large enough, then scan forward until CheckAllocation succeeds.
9600 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
9601 m_FreeSuballocationsBySize.data(),
9602 m_FreeSuballocationsBySize.data() + freeSuballocCount,
9603 allocSize + 2 * VMA_DEBUG_MARGIN,
9604 VmaSuballocationItemSizeLess());
9605 size_t index = it - m_FreeSuballocationsBySize.data();
9606 for(; index < freeSuballocCount; ++index)
9611 bufferImageGranularity,
9615 m_FreeSuballocationsBySize[index],
9617 &pAllocationRequest->offset,
9618 &pAllocationRequest->itemsToMakeLostCount,
9619 &pAllocationRequest->sumFreeSize,
9620 &pAllocationRequest->sumItemSize))
9622 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// MIN_OFFSET: walk the suballocation list front-to-back and take the first
// free range that fits, minimizing the resulting offset.
9627 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
9629 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9630 it != m_Suballocations.end();
9633 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
9636 bufferImageGranularity,
9642 &pAllocationRequest->offset,
9643 &pAllocationRequest->itemsToMakeLostCount,
9644 &pAllocationRequest->sumFreeSize,
9645 &pAllocationRequest->sumItemSize))
9647 pAllocationRequest->item = it;
// Worst-fit: scan the by-size list from the largest range downwards.
9655 for(
size_t index = freeSuballocCount; index--; )
9660 bufferImageGranularity,
9664 m_FreeSuballocationsBySize[index],
9666 &pAllocationRequest->offset,
9667 &pAllocationRequest->itemsToMakeLostCount,
9668 &pAllocationRequest->sumFreeSize,
9669 &pAllocationRequest->sumItemSize))
9671 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Lost-allocation path: try every position whose occupants can become lost,
// keeping the candidate with the lowest CalcCost().
9678 if(canMakeOtherLost)
9683 VmaAllocationRequest tmpAllocRequest = {};
9684 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
9685 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
9686 suballocIt != m_Suballocations.end();
9689 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
9690 suballocIt->hAllocation->CanBecomeLost())
9695 bufferImageGranularity,
9701 &tmpAllocRequest.offset,
9702 &tmpAllocRequest.itemsToMakeLostCount,
9703 &tmpAllocRequest.sumFreeSize,
9704 &tmpAllocRequest.sumItemSize))
9708 *pAllocationRequest = tmpAllocRequest;
9709 pAllocationRequest->item = suballocIt;
9712 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
9714 *pAllocationRequest = tmpAllocRequest;
9715 pAllocationRequest->item = suballocIt;
// Makes the allocations counted in pAllocationRequest->itemsToMakeLostCount
// lost, freeing their ranges so the request's position becomes available.
// Return statements and braces are elided in this extract.
9728 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
9729 uint32_t currentFrameIndex,
9730 uint32_t frameInUseCount,
9731 VmaAllocationRequest* pAllocationRequest)
9733 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
9735 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip free entries; only occupied ones can be made lost.
9737 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
9739 ++pAllocationRequest->item;
9741 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9742 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
9743 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
9744 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation merges neighbors and returns the merged free item.
9746 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
9747 --pAllocationRequest->itemsToMakeLostCount;
9755 VMA_HEAVY_ASSERT(Validate());
9756 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9757 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes every eligible allocation in the block lost; returns how many were
// freed this way.
9762 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9764 uint32_t lostAllocationCount = 0;
9765 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9766 it != m_Suballocations.end();
9769 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
9770 it->hAllocation->CanBecomeLost() &&
9771 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9773 it = FreeSuballocation(it);
9774 ++lostAllocationCount;
9777 return lostAllocationCount;
// Verifies the magic-value margins before and after every allocation in the
// mapped block data; returns VK_ERROR_VALIDATION_FAILED_EXT on corruption.
9780 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
9782 for(
auto& suballoc : m_Suballocations)
9784 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9786 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9788 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9789 return VK_ERROR_VALIDATION_FAILED_EXT;
9791 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9793 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9794 return VK_ERROR_VALIDATION_FAILED_EXT;
9802 void VmaBlockMetadata_Generic::Alloc(
9803 const VmaAllocationRequest& request,
9804 VmaSuballocationType type,
9805 VkDeviceSize allocSize,
9808 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
9809 VMA_ASSERT(request.item != m_Suballocations.end());
9810 VmaSuballocation& suballoc = *request.item;
9812 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9814 VMA_ASSERT(request.offset >= suballoc.offset);
9815 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
9816 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
9817 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
9821 UnregisterFreeSuballocation(request.item);
9823 suballoc.offset = request.offset;
9824 suballoc.size = allocSize;
9825 suballoc.type = type;
9826 suballoc.hAllocation = hAllocation;
9831 VmaSuballocation paddingSuballoc = {};
9832 paddingSuballoc.offset = request.offset + allocSize;
9833 paddingSuballoc.size = paddingEnd;
9834 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9835 VmaSuballocationList::iterator next = request.item;
9837 const VmaSuballocationList::iterator paddingEndItem =
9838 m_Suballocations.insert(next, paddingSuballoc);
9839 RegisterFreeSuballocation(paddingEndItem);
9845 VmaSuballocation paddingSuballoc = {};
9846 paddingSuballoc.offset = request.offset - paddingBegin;
9847 paddingSuballoc.size = paddingBegin;
9848 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9849 const VmaSuballocationList::iterator paddingBeginItem =
9850 m_Suballocations.insert(request.item, paddingSuballoc);
9851 RegisterFreeSuballocation(paddingBeginItem);
9855 m_FreeCount = m_FreeCount - 1;
9856 if(paddingBegin > 0)
9864 m_SumFreeSize -= allocSize;
// Frees the given allocation: linear search over the suballocation list for the
// item holding this handle, then FreeSuballocation() converts it back to FREE
// (merging with free neighbors). Debug-asserts if the allocation is not found
// in this block.
9867 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
9869 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9870 suballocItem != m_Suballocations.end();
9873 VmaSuballocation& suballoc = *suballocItem;
9874 if(suballoc.hAllocation == allocation)
9876 FreeSuballocation(suballocItem);
9877 VMA_HEAVY_ASSERT(Validate());
// Reached only if no suballocation matched — caller passed an allocation that
// does not belong to this block.
9881 VMA_ASSERT(0 &&
"Not found!");
// Same as Free(), but identifies the suballocation to release by its byte offset
// within the block rather than by allocation handle.
9884 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
9886 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9887 suballocItem != m_Suballocations.end();
9890 VmaSuballocation& suballoc = *suballocItem;
9891 if(suballoc.offset == offset)
9893 FreeSuballocation(suballocItem);
// Reached only if no suballocation starts at the given offset.
9897 VMA_ASSERT(0 &&
"Not found!");
// Debug validation of m_FreeSuballocationsBySize: every entry must point at a
// FREE suballocation of at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER
// bytes, and the vector must be sorted by non-decreasing size — the invariant
// that UnregisterFreeSuballocation's binary search relies on.
9900 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const
9902 VkDeviceSize lastSize = 0;
9903 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
9905 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
9907 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
9908 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
// Sorted-order check against the previous entry.
9909 VMA_VALIDATE(it->size >= lastSize);
9910 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment can be placed starting
// at the suballocation pointed to by suballocItem; on success writes the final
// (aligned, debug-margin-adjusted) offset to *pOffset.
// Two major paths:
//  - canMakeOtherLost == true: the candidate region may span USED suballocations
//    whose allocations are eligible to become "lost" (CanBecomeLost() and last
//    use older than currentFrameIndex - frameInUseCount). Eligible items are
//    counted into *itemsToMakeLostCount, free/used bytes accumulated into
//    *pSumFreeSize / *pSumItemSize.
//  - canMakeOtherLost == false: suballocItem itself must be FREE and large enough.
// In both paths, bufferImageGranularity conflicts with neighboring suballocations
// (linear vs. optimal resources on the same "page") bump the offset up to the
// granularity boundary and/or reject placement.
// NOTE(review): this chunk is elided — early-return statements, braces and some
// loop-control lines are missing from the visible text; comments describe only
// the visible logic.
9915 bool VmaBlockMetadata_Generic::CheckAllocation(
9916 uint32_t currentFrameIndex,
9917 uint32_t frameInUseCount,
9918 VkDeviceSize bufferImageGranularity,
9919 VkDeviceSize allocSize,
9920 VkDeviceSize allocAlignment,
9921 VmaSuballocationType allocType,
9922 VmaSuballocationList::const_iterator suballocItem,
9923 bool canMakeOtherLost,
9924 VkDeviceSize* pOffset,
9925 size_t* itemsToMakeLostCount,
9926 VkDeviceSize* pSumFreeSize,
9927 VkDeviceSize* pSumItemSize)
const
9929 VMA_ASSERT(allocSize > 0);
9930 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9931 VMA_ASSERT(suballocItem != m_Suballocations.cend());
9932 VMA_ASSERT(pOffset != VMA_NULL);
9934 *itemsToMakeLostCount = 0;
// --- Path 1: allocation may "steal" space from lost-eligible used items. ---
9938 if(canMakeOtherLost)
9940 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9942 *pSumFreeSize = suballocItem->size;
// Item is used: it only counts if its allocation can be made lost.
9946 if(suballocItem->hAllocation->CanBecomeLost() &&
9947 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9949 ++*itemsToMakeLostCount;
9950 *pSumItemSize = suballocItem->size;
// Reject if the rest of the block from this offset cannot hold the allocation.
9959 if(GetSize() - suballocItem->offset < allocSize)
// Start at the item's offset, then apply debug margin and alignment.
9965 *pOffset = suballocItem->offset;
9968 if(VMA_DEBUG_MARGIN > 0)
9970 *pOffset += VMA_DEBUG_MARGIN;
9974 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Check granularity conflict with PREVIOUS suballocations on the same page;
// if found, bump offset up to the next granularity boundary.
9978 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
9980 bool bufferImageGranularityConflict =
false;
9981 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
9982 while(prevSuballocItem != m_Suballocations.cbegin())
9985 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
9986 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
9988 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9990 bufferImageGranularityConflict =
true;
9998 if(bufferImageGranularityConflict)
10000 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)&#59;
// If alignment pushed the offset past the end of this item, placement fails here.
10006 if(*pOffset >= suballocItem->offset + suballocItem->size)
10012 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
10015 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
10017 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
// Total required span must not run past the block end.
10019 if(suballocItem->offset + totalSize > GetSize())
// Walk forward over as many following suballocations as needed to cover
// totalSize, accumulating free/lost-eligible sizes along the way.
10026 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
10027 if(totalSize > suballocItem->size)
10029 VkDeviceSize remainingSize = totalSize - suballocItem->size;
10030 while(remainingSize > 0)
10032 ++lastSuballocItem;
10033 if(lastSuballocItem == m_Suballocations.cend())
10037 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
10039 *pSumFreeSize += lastSuballocItem->size;
10043 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
10044 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
10045 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10047 ++*itemsToMakeLostCount;
10048 *pSumItemSize += lastSuballocItem->size;
10055 remainingSize = (lastSuballocItem->size < remainingSize) ?
10056 remainingSize - lastSuballocItem->size : 0;
// Check granularity conflict with FOLLOWING suballocations — those must also be
// lost-eligible to proceed, since they'd share a page with the new allocation.
10062 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
10064 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
10065 ++nextSuballocItem;
10066 while(nextSuballocItem != m_Suballocations.cend())
10068 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
10069 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10071 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10073 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
10074 if(nextSuballoc.hAllocation->CanBecomeLost() &&
10075 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10077 ++*itemsToMakeLostCount;
10090 ++nextSuballocItem;
// --- Path 2: canMakeOtherLost == false — the item itself must be FREE. ---
10096 const VmaSuballocation& suballoc = *suballocItem;
10097 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10099 *pSumFreeSize = suballoc.size;
// Early reject: free item smaller than the allocation itself.
10102 if(suballoc.size < allocSize)
10108 *pOffset = suballoc.offset;
10111 if(VMA_DEBUG_MARGIN > 0)
10113 *pOffset += VMA_DEBUG_MARGIN;
10117 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same previous-neighbor granularity conflict handling as in path 1.
10121 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
10123 bool bufferImageGranularityConflict =
false;
10124 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
10125 while(prevSuballocItem != m_Suballocations.cbegin())
10127 --prevSuballocItem;
10128 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
10129 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
10131 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10133 bufferImageGranularityConflict =
true;
10141 if(bufferImageGranularityConflict)
10143 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)&#59;
10148 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
10151 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Padding + allocation + end margin must fit inside this single free item.
10154 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Next-neighbor granularity conflict is a hard failure here (no lost allocations
// allowed on this path).
10161 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
10163 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
10164 ++nextSuballocItem;
10165 while(nextSuballocItem != m_Suballocations.cend())
10167 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
10168 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10170 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10180 ++nextSuballocItem;
// Merges a FREE suballocation with its immediate successor, which must also be
// FREE: the successor's size is folded into `item` and the successor is erased.
// Callers are responsible for by-size-vector bookkeeping around this call.
10189 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
10191 VMA_ASSERT(item != m_Suballocations.end());
10192 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10194 VmaSuballocationList::iterator nextItem = item;
10196 VMA_ASSERT(nextItem != m_Suballocations.end());
10197 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
10199 item->size += nextItem->size;
10201 m_Suballocations.erase(nextItem);
// Converts the given suballocation back to FREE, coalesces it with free
// neighbors (next and/or previous), registers the resulting free item in the
// by-size vector, and returns an iterator to it.
10204 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
10207 VmaSuballocation& suballoc = *suballocItem;
10208 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10209 suballoc.hAllocation = VK_NULL_HANDLE;
// The freed bytes return to the pool's free total.
10213 m_SumFreeSize += suballoc.size;
// Decide which neighbors to merge with.
10216 bool mergeWithNext =
false;
10217 bool mergeWithPrev =
false;
10219 VmaSuballocationList::iterator nextItem = suballocItem;
10221 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
10223 mergeWithNext =
true;
10226 VmaSuballocationList::iterator prevItem = suballocItem;
10227 if(suballocItem != m_Suballocations.begin())
10230 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
10232 mergeWithPrev =
true;
// Merging invalidates the neighbor's entry in the by-size vector, so it must be
// unregistered first; after merging with prev, the (bigger) prev is re-registered.
10238 UnregisterFreeSuballocation(nextItem);
10239 MergeFreeWithNext(suballocItem);
10244 UnregisterFreeSuballocation(prevItem);
10245 MergeFreeWithNext(prevItem);
10246 RegisterFreeSuballocation(prevItem);
// No merge with prev: register the freed item itself.
10251 RegisterFreeSuballocation(suballocItem);
10252 return suballocItem;
// Inserts a FREE suballocation into m_FreeSuballocationsBySize, keeping the
// vector sorted by size. Items smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately not tracked.
10256 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
10258 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10259 VMA_ASSERT(item->size > 0);
10263 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
10265 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Empty vector: plain push_back avoids the sorted-insert machinery.
10267 if(m_FreeSuballocationsBySize.empty())
10269 m_FreeSuballocationsBySize.push_back(item);
10273 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from m_FreeSuballocationsBySize. Binary-searches
// for the first entry whose size is not less than item->size, then scans
// linearly over the run of equal-sized entries to find the exact iterator.
// Debug-asserts if the item is not present.
10281 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
10283 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
10284 VMA_ASSERT(item->size > 0);
10288 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Only items >= the registration threshold were ever added.
10290 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
10292 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
10293 m_FreeSuballocationsBySize.data(),
10294 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
10296 VmaSuballocationItemSizeLess());
10297 for(
size_t index = it - m_FreeSuballocationsBySize.data();
10298 index < m_FreeSuballocationsBySize.size();
10301 if(m_FreeSuballocationsBySize[index] == item)
10303 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still within the equal-size run — otherwise the item can't be here at all.
10306 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
10308 VMA_ASSERT(0 &&
"Not found.");
// Heuristic: returns true if a bufferImageGranularity conflict could occur in
// this block — i.e. a linear/optimal type conflict was seen between consecutive
// used suballocations, or some used allocation's alignment is smaller than the
// granularity. Trivially no conflict when granularity == 1 or block is empty.
// inOutPrevSuballocType carries the last used type across blocks between calls.
10314 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
10315 VkDeviceSize bufferImageGranularity,
10316 VmaSuballocationType& inOutPrevSuballocType)
const
10318 if(bufferImageGranularity == 1 || IsEmpty())
10323 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
10324 bool typeConflictFound =
false;
10325 for(
const auto& suballoc : m_Suballocations)
10327 const VmaSuballocationType suballocType = suballoc.type;
// Only used (non-FREE) suballocations participate.
10328 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
10330 minAlignment = VMA_MIN(minAlignment, suballoc.hAllocation->GetAlignment());
10331 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
10333 typeConflictFound =
true;
10335 inOutPrevSuballocType = suballocType;
10339 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Linear ("stack"/"ring buffer") block metadata constructor. Starts with both
// suballocation vectors empty, vector 0 acting as the "1st" vector, the 2nd
// vector unused (SECOND_VECTOR_EMPTY), and all null-item (freed-but-not-yet-
// compacted) counters at zero.
10345 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
10346 VmaBlockMetadata(hAllocator),
10348 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10349 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
10350 m_1stVectorIndex(0),
10351 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
10352 m_1stNullItemsBeginCount(0),
10353 m_1stNullItemsMiddleCount(0),
10354 m_2ndNullItemsCount(0)
// Destructor — no explicit cleanup; members release their own storage.
10358 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes metadata for a block of the given size; initially the whole block
// is free, so the free-size counter equals the block size.
10362 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
10364 VmaBlockMetadata::Init(size);
10365 m_SumFreeSize = size;
// Debug validation of the linear allocator's invariants:
//  - 2nd vector emptiness must match SECOND_VECTOR_EMPTY mode; a ring buffer
//    requires a non-empty 1st vector.
//  - First real item of the 1st vector and last items of both vectors must be
//    non-null; null-item counters must not exceed vector sizes.
//  - All suballocations are walked in address order (2nd vector first in ring-
//    buffer mode, then 1st vector, then 2nd vector backwards in double-stack
//    mode), checking monotonically increasing offsets, hAllocation consistency,
//    and accumulating used bytes to verify m_SumFreeSize == size - used.
// NOTE(review): elided chunk — braces/returns missing; comments describe
// visible logic only.
10368 bool VmaBlockMetadata_Linear::Validate()
const
10370 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10371 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10373 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
10374 VMA_VALIDATE(!suballocations1st.empty() ||
10375 suballocations2nd.empty() ||
10376 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
10378 if(!suballocations1st.empty())
// Null items may live only at the very beginning of the 1st vector.
10381 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
10383 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
10385 if(!suballocations2nd.empty())
10388 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
10391 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
10392 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
10394 VkDeviceSize sumUsedSize = 0;
10395 const size_t suballoc1stCount = suballocations1st.size();
// `offset` tracks the minimum legal offset of the next item (debug margin included).
10396 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Pass over the 2nd vector first when it holds the ring-buffer "tail".
10398 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10400 const size_t suballoc2ndCount = suballocations2nd.size();
10401 size_t nullItem2ndCount = 0;
10402 for(
size_t i = 0; i < suballoc2ndCount; ++i)
10404 const VmaSuballocation& suballoc = suballocations2nd[i];
10405 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10407 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10408 VMA_VALIDATE(suballoc.offset >= offset);
10412 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10413 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10414 sumUsedSize += suballoc.size;
10418 ++nullItem2ndCount;
10421 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10424 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must be FREE with no allocation handle.
10427 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
10429 const VmaSuballocation& suballoc = suballocations1st[i];
10430 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
10431 suballoc.hAllocation == VK_NULL_HANDLE);
10434 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Main pass over the 1st vector past the leading null items.
10436 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
10438 const VmaSuballocation& suballoc = suballocations1st[i];
10439 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10441 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10442 VMA_VALIDATE(suballoc.offset >= offset);
10443 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
10447 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10448 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10449 sumUsedSize += suballoc.size;
10453 ++nullItem1stCount;
10456 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10458 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// In double-stack mode the 2nd vector grows downward from the end of the block,
// so it is validated in reverse index order (descending i == ascending offset).
10460 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10462 const size_t suballoc2ndCount = suballocations2nd.size();
10463 size_t nullItem2ndCount = 0;
10464 for(
size_t i = suballoc2ndCount; i--; )
10466 const VmaSuballocation& suballoc = suballocations2nd[i];
10467 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10469 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10470 VMA_VALIDATE(suballoc.offset >= offset);
10474 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10475 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10476 sumUsedSize += suballoc.size;
10480 ++nullItem2ndCount;
10483 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10486 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Aggregate consistency: offsets stayed inside the block and the cached free
// size matches size minus the used bytes just counted.
10489 VMA_VALIDATE(offset <= GetSize());
10490 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Number of live allocations: total items in both vectors minus the null
// (freed-but-not-compacted) items tracked by the three null counters.
10495 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const
10497 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
10498 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Largest contiguous free range, depending on the 2nd-vector mode:
//  - EMPTY: max of the gap before the first 1st-vector item and the gap after
//    the last one (to the end of the block).
//  - RING_BUFFER: the gap between the 2nd vector's end and the 1st vector's start.
//  - DOUBLE_STACK: the gap between the top of the 1st stack and the bottom of
//    the 2nd (downward-growing) stack.
// NOTE(review): elided chunk — the empty-block early-out and some returns are
// missing from the visible text.
10503 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const
10503 const VkDeviceSize size = GetSize();
10515 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10517 switch(m_2ndVectorMode)
10519 case SECOND_VECTOR_EMPTY:
10525 const size_t suballocations1stCount = suballocations1st.size();
10526 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
10527 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10528 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
10530 firstSuballoc.offset,
10531 size - (lastSuballoc.offset + lastSuballoc.size));
10535 case SECOND_VECTOR_RING_BUFFER:
10540 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10541 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
10542 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
10543 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
10547 case SECOND_VECTOR_DOUBLE_STACK:
10552 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10553 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
10554 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
10555 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Accumulates per-block statistics (allocation count, used/unused byte totals,
// unused-range sizes) into outInfo by walking all suballocations in address
// order: the 2nd vector first in ring-buffer mode, then the 1st vector, then
// the 2nd vector backwards in double-stack mode. `lastOffset` tracks the end of
// the previously visited allocation so gaps can be reported as unused ranges.
// NOTE(review): elided chunk — the stat-update statements inside the branches
// are missing from the visible text; comments describe the traversal only.
10565 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
10567 const VkDeviceSize size = GetSize();
10568 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10569 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10570 const size_t suballoc1stCount = suballocations1st.size();
10571 const size_t suballoc2ndCount = suballocations2nd.size();
10582 VkDeviceSize lastOffset = 0;
// Ring-buffer tail: 2nd-vector allocations occupy [0, start of 1st vector).
10584 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10586 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10587 size_t nextAlloc2ndIndex = 0;
10588 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items.
10591 while(nextAlloc2ndIndex < suballoc2ndCount &&
10592 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10594 ++nextAlloc2ndIndex;
10598 if(nextAlloc2ndIndex < suballoc2ndCount)
10600 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
10603 if(lastOffset < suballoc.offset)
10606 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10620 lastOffset = suballoc.offset + suballoc.size;
10621 ++nextAlloc2ndIndex;
// No more 2nd-vector allocations: remaining space up to the 1st vector is unused.
10627 if(lastOffset < freeSpace2ndTo1stEnd)
10629 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10637 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector: runs up to the bottom of the 2nd stack (double-stack) or block end.
10642 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10643 const VkDeviceSize freeSpace1stTo2ndEnd =
10644 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10645 while(lastOffset < freeSpace1stTo2ndEnd)
10648 while(nextAlloc1stIndex < suballoc1stCount &&
10649 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10651 ++nextAlloc1stIndex;
10655 if(nextAlloc1stIndex < suballoc1stCount)
10657 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10660 if(lastOffset < suballoc.offset)
10663 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10677 lastOffset = suballoc.offset + suballoc.size;
10678 ++nextAlloc1stIndex;
10684 if(lastOffset < freeSpace1stTo2ndEnd)
10686 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10694 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack top: 2nd vector visited in reverse index order (ascending offset).
10698 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10700 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10701 while(lastOffset < size)
10704 while(nextAlloc2ndIndex != SIZE_MAX &&
10705 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10707 --nextAlloc2ndIndex;
10711 if(nextAlloc2ndIndex != SIZE_MAX)
10713 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10716 if(lastOffset < suballoc.offset)
10719 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10733 lastOffset = suballoc.offset + suballoc.size;
10734 --nextAlloc2ndIndex;
10740 if(lastOffset < size)
10742 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Adds this block's contribution to pool-level statistics (size, allocation and
// unused-range counts/bytes) using the same three-phase address-order traversal
// as CalcAllocationStatInfo.
// NOTE(review): elided chunk — the inoutStats field updates inside the branches
// are missing from the visible text.
10758 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const
10760 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10761 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10762 const VkDeviceSize size = GetSize();
10763 const size_t suballoc1stCount = suballocations1st.size();
10764 const size_t suballoc2ndCount = suballocations2nd.size();
10766 inoutStats.
size += size;
10768 VkDeviceSize lastOffset = 0;
10770 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10772 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): nextAlloc2ndIndex starts at m_1stNullItemsBeginCount here (a
// 1st-vector counter) while iterating the 2nd vector; CalcAllocationStatInfo
// and PrintDetailedMap both start at 0 — verify this against upstream VMA.
10773 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
10774 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items.
10777 while(nextAlloc2ndIndex < suballoc2ndCount &&
10778 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10780 ++nextAlloc2ndIndex;
10784 if(nextAlloc2ndIndex < suballoc2ndCount)
10786 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
10789 if(lastOffset < suballoc.offset)
10792 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10803 lastOffset = suballoc.offset + suballoc.size;
10804 ++nextAlloc2ndIndex;
10809 if(lastOffset < freeSpace2ndTo1stEnd)
10812 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10819 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector: runs up to the bottom of the 2nd stack (double-stack) or block end.
10824 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10825 const VkDeviceSize freeSpace1stTo2ndEnd =
10826 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10827 while(lastOffset < freeSpace1stTo2ndEnd)
10830 while(nextAlloc1stIndex < suballoc1stCount &&
10831 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10833 ++nextAlloc1stIndex;
10837 if(nextAlloc1stIndex < suballoc1stCount)
10839 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10842 if(lastOffset < suballoc.offset)
10845 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10856 lastOffset = suballoc.offset + suballoc.size;
10857 ++nextAlloc1stIndex;
10862 if(lastOffset < freeSpace1stTo2ndEnd)
10865 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10872 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack top: 2nd vector in reverse index order (ascending offset).
10876 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10878 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10879 while(lastOffset < size)
10882 while(nextAlloc2ndIndex != SIZE_MAX &&
10883 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10885 --nextAlloc2ndIndex;
10889 if(nextAlloc2ndIndex != SIZE_MAX)
10891 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10894 if(lastOffset < suballoc.offset)
10897 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10908 lastOffset = suballoc.offset + suballoc.size;
10909 --nextAlloc2ndIndex;
10914 if(lastOffset < size)
10917 const VkDeviceSize unusedRangeSize = size - lastOffset;
10930 #if VMA_STATS_STRING_ENABLED
// Writes a detailed JSON map of this block. Two passes over the same three-phase
// address-order traversal (ring-buffer 2nd vector, 1st vector, double-stack 2nd
// vector in reverse): the FIRST pass only counts allocations / unused ranges /
// used bytes (needed up front by PrintDetailedMap_Begin), the SECOND pass emits
// each allocation and unused range via the PrintDetailedMap_* helpers.
// NOTE(review): elided chunk — alloc1stCount/alloc2ndCount increments and the
// lastOffset reset between passes are missing from the visible text.
10931 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const
10933 const VkDeviceSize size = GetSize();
10934 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10935 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10936 const size_t suballoc1stCount = suballocations1st.size();
10937 const size_t suballoc2ndCount = suballocations2nd.size();
// ---- Pass 1: count allocations, unused ranges, and used bytes. ----
10941 size_t unusedRangeCount = 0;
10942 VkDeviceSize usedBytes = 0;
10944 VkDeviceSize lastOffset = 0;
10946 size_t alloc2ndCount = 0;
10947 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10949 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10950 size_t nextAlloc2ndIndex = 0;
10951 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items.
10954 while(nextAlloc2ndIndex < suballoc2ndCount &&
10955 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10957 ++nextAlloc2ndIndex;
10961 if(nextAlloc2ndIndex < suballoc2ndCount)
10963 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10966 if(lastOffset < suballoc.offset)
10969 ++unusedRangeCount;
10975 usedBytes += suballoc.size;
10978 lastOffset = suballoc.offset + suballoc.size;
10979 ++nextAlloc2ndIndex;
10984 if(lastOffset < freeSpace2ndTo1stEnd)
10987 ++unusedRangeCount;
10991 lastOffset = freeSpace2ndTo1stEnd;
10996 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10997 size_t alloc1stCount = 0;
10998 const VkDeviceSize freeSpace1stTo2ndEnd =
10999 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
11000 while(lastOffset < freeSpace1stTo2ndEnd)
11003 while(nextAlloc1stIndex < suballoc1stCount &&
11004 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
11006 ++nextAlloc1stIndex;
11010 if(nextAlloc1stIndex < suballoc1stCount)
11012 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
11015 if(lastOffset < suballoc.offset)
11018 ++unusedRangeCount;
11024 usedBytes += suballoc.size;
11027 lastOffset = suballoc.offset + suballoc.size;
11028 ++nextAlloc1stIndex;
// NOTE(review): this trailing-gap check compares against `size`, while the loop
// above and the second pass use freeSpace1stTo2ndEnd — verify against upstream.
11033 if(lastOffset < size)
11036 ++unusedRangeCount;
11040 lastOffset = freeSpace1stTo2ndEnd;
11044 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11046 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
11047 while(lastOffset < size)
11050 while(nextAlloc2ndIndex != SIZE_MAX &&
11051 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11053 --nextAlloc2ndIndex;
11057 if(nextAlloc2ndIndex != SIZE_MAX)
11059 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11062 if(lastOffset < suballoc.offset)
11065 ++unusedRangeCount;
11071 usedBytes += suballoc.size;
11074 lastOffset = suballoc.offset + suballoc.size;
11075 --nextAlloc2ndIndex;
11080 if(lastOffset < size)
11083 ++unusedRangeCount;
// ---- Pass 2: emit the JSON using the counts gathered above. ----
11092 const VkDeviceSize unusedBytes = size - usedBytes;
11093 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
11098 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11100 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
11101 size_t nextAlloc2ndIndex = 0;
11102 while(lastOffset < freeSpace2ndTo1stEnd)
11105 while(nextAlloc2ndIndex < suballoc2ndCount &&
11106 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11108 ++nextAlloc2ndIndex;
11112 if(nextAlloc2ndIndex < suballoc2ndCount)
11114 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11117 if(lastOffset < suballoc.offset)
11120 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11121 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11126 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11129 lastOffset = suballoc.offset + suballoc.size;
11130 ++nextAlloc2ndIndex;
11135 if(lastOffset < freeSpace2ndTo1stEnd)
11138 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
11139 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11143 lastOffset = freeSpace2ndTo1stEnd;
11148 nextAlloc1stIndex = m_1stNullItemsBeginCount;
11149 while(lastOffset < freeSpace1stTo2ndEnd)
11152 while(nextAlloc1stIndex < suballoc1stCount &&
11153 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
11155 ++nextAlloc1stIndex;
11159 if(nextAlloc1stIndex < suballoc1stCount)
11161 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
11164 if(lastOffset < suballoc.offset)
11167 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11168 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11173 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11176 lastOffset = suballoc.offset + suballoc.size;
11177 ++nextAlloc1stIndex;
11182 if(lastOffset < freeSpace1stTo2ndEnd)
11185 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
11186 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11190 lastOffset = freeSpace1stTo2ndEnd;
11194 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11196 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
11197 while(lastOffset < size)
11200 while(nextAlloc2ndIndex != SIZE_MAX &&
11201 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
11203 --nextAlloc2ndIndex;
11207 if(nextAlloc2ndIndex != SIZE_MAX)
11209 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
11212 if(lastOffset < suballoc.offset)
11215 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
11216 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11221 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
11224 lastOffset = suballoc.offset + suballoc.size;
11225 --nextAlloc2ndIndex;
11230 if(lastOffset < size)
11233 const VkDeviceSize unusedRangeSize = size - lastOffset;
11234 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
11243 PrintDetailedMap_End(json);
// Entry point for finding a placement in a linear block: validates inputs and
// dispatches to the upper-address (double-stack top) or lower-address variant,
// forwarding all parameters unchanged.
// NOTE(review): the `upperAddress` and `strategy` parameter declarations are
// elided from the visible text but are used below.
11247 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
11248 uint32_t currentFrameIndex,
11249 uint32_t frameInUseCount,
11250 VkDeviceSize bufferImageGranularity,
11251 VkDeviceSize allocSize,
11252 VkDeviceSize allocAlignment,
11254 VmaSuballocationType allocType,
11255 bool canMakeOtherLost,
11257 VmaAllocationRequest* pAllocationRequest)
11259 VMA_ASSERT(allocSize > 0);
11260 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
11261 VMA_ASSERT(pAllocationRequest != VMA_NULL);
11262 VMA_HEAVY_ASSERT(Validate());
11263 return upperAddress ?
11264 CreateAllocationRequest_UpperAddress(
11265 currentFrameIndex, frameInUseCount, bufferImageGranularity,
11266 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
11267 CreateAllocationRequest_LowerAddress(
11268 currentFrameIndex, frameInUseCount, bufferImageGranularity,
11269 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the top of the block (2nd vector growing
// downward, i.e. double-stack mode). Computes a candidate offset just below the
// lowest existing 2nd-vector item (or the block end), aligns it DOWNWARD,
// resolves bufferImageGranularity conflicts against the 2nd vector by aligning
// further down, and finally verifies the result does not collide with the top
// of the 1st vector (including granularity conflicts against its items).
// Incompatible with ring-buffer mode (asserts).
// NOTE(review): elided chunk — early `return false` statements and braces are
// missing from the visible text.
11272 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
11273 uint32_t currentFrameIndex,
11274 uint32_t frameInUseCount,
11275 VkDeviceSize bufferImageGranularity,
11276 VkDeviceSize allocSize,
11277 VkDeviceSize allocAlignment,
11278 VmaSuballocationType allocType,
11279 bool canMakeOtherLost,
11281 VmaAllocationRequest* pAllocationRequest)
11283 const VkDeviceSize size = GetSize();
11284 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11285 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11287 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11289 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
// Allocation larger than the whole block can never fit.
11294 if(allocSize > size)
// Candidate: just below the lowest 2nd-vector item, or at the very top if empty.
11298 VkDeviceSize resultBaseOffset = size - allocSize;
11299 if(!suballocations2nd.empty())
11301 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11302 resultBaseOffset = lastSuballoc.offset - allocSize;
11303 if(allocSize > lastSuballoc.offset)
11310 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin (downward) — fails if there is no room for it.
11313 if(VMA_DEBUG_MARGIN > 0)
11315 if(resultOffset < VMA_DEBUG_MARGIN)
11319 resultOffset -= VMA_DEBUG_MARGIN;
// Upper-address allocations align DOWN, not up.
11323 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Granularity conflicts with 2nd-vector neighbors above push the offset further down.
11327 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
11329 bool bufferImageGranularityConflict =
false;
11330 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11332 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11333 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11335 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
11337 bufferImageGranularityConflict =
true;
11345 if(bufferImageGranularityConflict)
11347 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity)&#59;
// There must be enough space between the end of the 1st vector and the
// candidate offset (including the debug margin).
11352 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
11353 suballocations1st.back().offset + suballocations1st.back().size :
11355 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Granularity conflict with the top of the 1st vector is a hard failure here.
11359 if(bufferImageGranularity > 1)
11361 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11363 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11364 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11366 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request. Nothing is ever made lost on this path.
11380 pAllocationRequest->offset = resultOffset;
11381 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
11382 pAllocationRequest->sumItemSize = 0;
11384 pAllocationRequest->itemsToMakeLostCount = 0;
11385 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to place a new allocation of allocSize/allocAlignment at the "lower
// address" side of this linear block. Two visible strategies:
//  - 2nd vector EMPTY or DOUBLE_STACK: append at the end of suballocations1st;
//  - 2nd vector EMPTY or RING_BUFFER: append at the end of suballocations2nd,
//    optionally making overlapping 1st-vector allocations lost (canMakeOtherLost).
// On success fills *pAllocationRequest. NOTE(review): this extract is lossy
// (braces/returns not shown); comments describe only the visible logic.
11392 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
11393 uint32_t currentFrameIndex,
11394 uint32_t frameInUseCount,
11395 VkDeviceSize bufferImageGranularity,
11396 VkDeviceSize allocSize,
11397 VkDeviceSize allocAlignment,
11398 VmaSuballocationType allocType,
11399 bool canMakeOtherLost,
11401 VmaAllocationRequest* pAllocationRequest)
11403 const VkDeviceSize size = GetSize();
11404 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11405 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Case 1: try to allocate at the end of the 1st vector.
11407 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
// Candidate base offset: just past the last 1st-vector suballocation (0 if empty).
11411 VkDeviceSize resultBaseOffset = 0;
11412 if(!suballocations1st.empty())
11414 const VmaSuballocation& lastSuballoc = suballocations1st.back();
11415 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11419 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve a debug margin before the new allocation when enabled.
11422 if(VMA_DEBUG_MARGIN > 0)
11424 resultOffset += VMA_DEBUG_MARGIN;
11428 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Scan previous 1st-vector suballocations for a buffer-image granularity
// conflict on the same "page"; if found, bump alignment up to the granularity.
11432 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty())
11434 bool bufferImageGranularityConflict =
false;
11435 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11437 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11438 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11440 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11442 bufferImageGranularityConflict =
true;
11450 if(bufferImageGranularityConflict)
11452 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the upper stack (double stack mode) or at
// the end of the whole block.
11456 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
11457 suballocations2nd.back().offset : size;
11460 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Also check granularity conflicts against the 2nd (upper) stack.
11464 if((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11466 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11468 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11469 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11471 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: request appends at the end of the 1st vector.
11485 pAllocationRequest->offset = resultOffset;
11486 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
11487 pAllocationRequest->sumItemSize = 0;
11489 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
11490 pAllocationRequest->itemsToMakeLostCount = 0;
// Case 2: wrap around - allocate at the end of the 2nd vector (ring buffer).
11497 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11499 VMA_ASSERT(!suballocations1st.empty());
11501 VkDeviceSize resultBaseOffset = 0;
11502 if(!suballocations2nd.empty())
11504 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11505 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11509 VkDeviceSize resultOffset = resultBaseOffset;
11512 if(VMA_DEBUG_MARGIN > 0)
11514 resultOffset += VMA_DEBUG_MARGIN;
11518 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflict check against previous 2nd-vector suballocations.
11522 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
11524 bool bufferImageGranularityConflict =
false;
11525 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
11527 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
11528 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11530 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11532 bufferImageGranularityConflict =
true;
11540 if(bufferImageGranularityConflict)
11542 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
11546 pAllocationRequest->itemsToMakeLostCount = 0;
11547 pAllocationRequest->sumItemSize = 0;
11548 size_t index1st = m_1stNullItemsBeginCount;
// When allowed, count 1st-vector allocations overlapping the candidate range
// that can be made lost (expired w.r.t. currentFrameIndex/frameInUseCount).
11550 if(canMakeOtherLost)
11552 while(index1st < suballocations1st.size() &&
11553 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
11556 const VmaSuballocation& suballoc = suballocations1st[index1st];
11557 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
11563 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11564 if(suballoc.hAllocation->CanBecomeLost() &&
11565 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11567 ++pAllocationRequest->itemsToMakeLostCount;
11568 pAllocationRequest->sumItemSize += suballoc.size;
// Additionally make lost any following suballocations that would share a page
// with the candidate range (buffer-image granularity).
11580 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
11582 while(index1st < suballocations1st.size())
11584 const VmaSuballocation& suballoc = suballocations1st[index1st];
11585 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
11587 if(suballoc.hAllocation != VK_NULL_HANDLE)
11590 if(suballoc.hAllocation->CanBecomeLost() &&
11591 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11593 ++pAllocationRequest->itemsToMakeLostCount;
11594 pAllocationRequest->sumItemSize += suballoc.size;
// Unsupported special case: request would run past the end of the block after
// consuming the whole 1st vector.
11612 if(index1st == suballocations1st.size() &&
11613 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
11616 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// Fits either before the end of the block or before the next surviving
// 1st-vector suballocation.
11621 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
11622 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
11626 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
11628 for(
size_t nextSuballocIndex = index1st;
11629 nextSuballocIndex < suballocations1st.size();
11630 nextSuballocIndex++)
11632 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
11633 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11635 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: request appends at the end of the 2nd vector; free size is measured
// up to the next surviving 1st-vector item (or block end) minus made-lost items.
11649 pAllocationRequest->offset = resultOffset;
11650 pAllocationRequest->sumFreeSize =
11651 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
11653 - pAllocationRequest->sumItemSize;
11654 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Actually makes lost the allocations counted in the request: walks the 1st
// vector (continuing into the 2nd when in ring-buffer mode), converts each
// expired allocation into a FREE item, and updates the null-item counters and
// m_SumFreeSize. Finishes with CleanupAfterFree().
11663 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
11664 uint32_t currentFrameIndex,
11665 uint32_t frameInUseCount,
11666 VmaAllocationRequest* pAllocationRequest)
// Nothing requested to be made lost - trivial success.
11668 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Lost allocations are only supported with EMPTY or RING_BUFFER 2nd vector.
11673 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
11676 SuballocationVectorType* suballocations = &AccessSuballocations1st();
11677 size_t index = m_1stNullItemsBeginCount;
11678 size_t madeLostCount = 0;
11679 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// End of the 1st vector reached: continue in the 2nd (ring-buffer mode).
11681 if(index == suballocations->size())
11685 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11687 suballocations = &AccessSuballocations2nd();
11691 VMA_ASSERT(!suballocations->empty());
11693 VmaSuballocation& suballoc = (*suballocations)[index];
11694 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11696 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11697 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
11698 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Turn the item into a free gap and account for it in the proper counter.
11700 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11701 suballoc.hAllocation = VK_NULL_HANDLE;
11702 m_SumFreeSize += suballoc.size;
11703 if(suballocations == &AccessSuballocations1st())
11705 ++m_1stNullItemsMiddleCount;
11709 ++m_2ndNullItemsCount;
11721 CleanupAfterFree();
// Makes lost every allocation in both suballocation vectors that can become
// lost relative to currentFrameIndex/frameInUseCount; returns how many were
// lost. Runs CleanupAfterFree() only if anything changed.
11727 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
11729 uint32_t lostAllocationCount = 0;
// 1st vector: skip the leading run of null items.
11731 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11732 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11734 VmaSuballocation& suballoc = suballocations1st[i];
11735 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11736 suballoc.hAllocation->CanBecomeLost() &&
11737 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11739 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11740 suballoc.hAllocation = VK_NULL_HANDLE;
11741 ++m_1stNullItemsMiddleCount;
11742 m_SumFreeSize += suballoc.size;
11743 ++lostAllocationCount;
// 2nd vector: scanned in full (it has no begin-null-run counter).
11747 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11748 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11750 VmaSuballocation& suballoc = suballocations2nd[i];
11751 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11752 suballoc.hAllocation->CanBecomeLost() &&
11753 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11755 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11756 suballoc.hAllocation = VK_NULL_HANDLE;
11757 ++m_2ndNullItemsCount;
11758 m_SumFreeSize += suballoc.size;
11759 ++lostAllocationCount;
// Compact/cleanup only when something was actually freed.
11763 if(lostAllocationCount)
11765 CleanupAfterFree();
11768 return lostAllocationCount;
// Validates the magic-value margins written immediately before and after every
// live suballocation in both vectors. Returns VK_ERROR_VALIDATION_FAILED_EXT
// on the first corrupted margin found.
11771 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
11773 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11774 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11776 const VmaSuballocation& suballoc = suballocations1st[i];
11777 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Margin before the allocation.
11779 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11781 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11782 return VK_ERROR_VALIDATION_FAILED_EXT;
// Margin after the allocation.
11784 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11786 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11787 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd vector.
11792 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11793 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11795 const VmaSuballocation& suballoc = suballocations2nd[i];
11796 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11798 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11800 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11801 return VK_ERROR_VALIDATION_FAILED_EXT;
11803 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11805 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11806 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously created allocation request: appends the new
// suballocation to the vector dictated by request.type and updates the 2nd
// vector mode and m_SumFreeSize accordingly.
11814 void VmaBlockMetadata_Linear::Alloc(
11815 const VmaAllocationRequest& request,
11816 VmaSuballocationType type,
11817 VkDeviceSize allocSize,
11820 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
11822 switch(request.type)
// Upper-address allocation: push onto the 2nd vector, switching it into
// double-stack mode.
11824 case VmaAllocationRequestType::UpperAddress:
11826 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
11827 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
11828 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11829 suballocations2nd.push_back(newSuballoc);
11830 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Append at the end of the 1st vector; sanity-check offset ordering and bounds.
11833 case VmaAllocationRequestType::EndOf1st:
11835 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11837 VMA_ASSERT(suballocations1st.empty() ||
11838 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
11840 VMA_ASSERT(request.offset + allocSize <= GetSize());
11842 suballocations1st.push_back(newSuballoc);
// Append at the end of the 2nd vector (ring buffer), starting ring-buffer
// mode if the 2nd vector was empty.
11845 case VmaAllocationRequestType::EndOf2nd:
11847 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// New allocation must end before the first surviving 1st-vector item.
11849 VMA_ASSERT(!suballocations1st.empty() &&
11850 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
11851 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11853 switch(m_2ndVectorMode)
11855 case SECOND_VECTOR_EMPTY:
11857 VMA_ASSERT(suballocations2nd.empty());
11858 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
11860 case SECOND_VECTOR_RING_BUFFER:
11862 VMA_ASSERT(!suballocations2nd.empty());
11864 case SECOND_VECTOR_DOUBLE_STACK:
11865 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
11871 suballocations2nd.push_back(newSuballoc);
11875 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
11878 m_SumFreeSize -= newSuballoc.size;
// Frees the given allocation by delegating to FreeAtOffset with its offset
// inside this block.
11881 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
11883 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at `offset`. Fast paths: first item of the 1st
// vector, last item of the 2nd vector (ring/double-stack), or last item of the
// 1st vector. Otherwise binary-searches both vectors by offset. Every hit
// updates m_SumFreeSize / null-item counters and runs CleanupAfterFree().
11886 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
11888 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11889 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11891 if(!suballocations1st.empty())
// Fast path: freeing the first live item of the 1st vector just extends the
// leading null run.
11894 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
11895 if(firstSuballoc.offset == offset)
11897 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11898 firstSuballoc.hAllocation = VK_NULL_HANDLE;
11899 m_SumFreeSize += firstSuballoc.size;
11900 ++m_1stNullItemsBeginCount;
11901 CleanupAfterFree();
// Fast path: freeing the top of the 2nd vector pops it directly.
11907 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
11908 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11910 VmaSuballocation& lastSuballoc = suballocations2nd.back();
11911 if(lastSuballoc.offset == offset)
11913 m_SumFreeSize += lastSuballoc.size;
11914 suballocations2nd.pop_back();
11915 CleanupAfterFree();
// Fast path: freeing the top of the 1st vector when the 2nd is unused.
11920 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
11922 VmaSuballocation& lastSuballoc = suballocations1st.back();
11923 if(lastSuballoc.offset == offset)
11925 m_SumFreeSize += lastSuballoc.size;
11926 suballocations1st.pop_back();
11927 CleanupAfterFree();
// General case: binary search the 1st vector (live part only) by offset.
11934 VmaSuballocation refSuballoc;
11935 refSuballoc.offset = offset;
11937 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
11938 suballocations1st.begin() + m_1stNullItemsBeginCount,
11939 suballocations1st.end(),
11941 VmaSuballocationOffsetLess());
11942 if(it != suballocations1st.end())
11944 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11945 it->hAllocation = VK_NULL_HANDLE;
11946 ++m_1stNullItemsMiddleCount;
11947 m_SumFreeSize += it->size;
11948 CleanupAfterFree();
11953 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
// 2nd vector is sorted ascending in ring-buffer mode and descending in
// double-stack mode - pick the matching comparator.
11956 VmaSuballocation refSuballoc;
11957 refSuballoc.offset = offset;
11959 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
11960 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
11961 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
11962 if(it != suballocations2nd.end())
11964 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11965 it->hAllocation = VK_NULL_HANDLE;
11966 ++m_2ndNullItemsCount;
11967 m_SumFreeSize += it->size;
11968 CleanupAfterFree();
// Reaching here means the offset matched nothing - internal inconsistency.
11973 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Compaction heuristic for the 1st vector: compact only when it holds more
// than 32 items and null items are at least 1.5x the live items
// (null*2 >= live*3).
11976 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const
11978 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11979 const size_t suballocCount = AccessSuballocations1st().size();
11980 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Normalizes internal state after any free: drops trailing/leading null items,
// optionally compacts the 1st vector, and when the 1st vector becomes empty
// while the 2nd is a ring buffer, swaps the roles of the two vectors
// (m_1stVectorIndex ^= 1).
11983 void VmaBlockMetadata_Linear::CleanupAfterFree()
11985 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11986 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Whole block free: reset everything to the empty state.
11990 suballocations1st.clear();
11991 suballocations2nd.clear();
11992 m_1stNullItemsBeginCount = 0;
11993 m_1stNullItemsMiddleCount = 0;
11994 m_2ndNullItemsCount = 0;
11995 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11999 const size_t suballoc1stCount = suballocations1st.size();
12000 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
12001 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the leading null run of the 1st vector over freed items.
12004 while(m_1stNullItemsBeginCount < suballoc1stCount &&
12005 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
12007 ++m_1stNullItemsBeginCount;
12008 --m_1stNullItemsMiddleCount;
// Pop freed items off the tail of the 1st vector.
12012 while(m_1stNullItemsMiddleCount > 0 &&
12013 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
12015 --m_1stNullItemsMiddleCount;
12016 suballocations1st.pop_back();
// Pop freed items off the tail of the 2nd vector.
12020 while(m_2ndNullItemsCount > 0 &&
12021 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
12023 --m_2ndNullItemsCount;
12024 suballocations2nd.pop_back();
// Remove freed items from the front of the 2nd vector.
12028 while(m_2ndNullItemsCount > 0 &&
12029 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
12031 --m_2ndNullItemsCount;
12032 VmaVectorRemove(suballocations2nd, 0);
// Compact the 1st vector in place when the null/live ratio is high enough.
12035 if(ShouldCompact1st())
12037 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
12038 size_t srcIndex = m_1stNullItemsBeginCount;
12039 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
12041 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
12045 if(dstIndex != srcIndex)
12047 suballocations1st[dstIndex] = suballocations1st[srcIndex];
12051 suballocations1st.resize(nonNullItemCount);
12052 m_1stNullItemsBeginCount = 0;
12053 m_1stNullItemsMiddleCount = 0;
12057 if(suballocations2nd.empty())
12059 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector fully consumed: clear it, and if the 2nd vector is a ring
// buffer, promote it to be the new 1st vector.
12063 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
12065 suballocations1st.clear();
12066 m_1stNullItemsBeginCount = 0;
12068 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
12071 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
12072 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
12073 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
12074 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
12076 ++m_1stNullItemsBeginCount;
12077 --m_1stNullItemsMiddleCount;
12079 m_2ndNullItemsCount = 0;
// Swap which underlying vector is considered "1st".
12080 m_1stVectorIndex ^= 1;
12085 VMA_HEAVY_ASSERT(Validate());
// Constructs empty buddy-allocator metadata: zero allocations and a zeroed
// per-level free list. Real setup (root node, level count) happens in Init().
12092 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
12093 VmaBlockMetadata(hAllocator),
12095 m_AllocationCount(0),
12099 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively deletes the whole buddy tree starting at the root.
12102 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
12104 DeleteNode(m_Root);
// Initializes buddy metadata for a block of `size` bytes. The usable size is
// rounded down to a power of two (the remainder is "unusable"); the level
// count is grown while node sizes stay >= MIN_NODE_SIZE (capped at
// MAX_LEVELS); a single free root node covering the whole usable range is
// placed on the level-0 free list.
12107 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
12109 VmaBlockMetadata::Init(size);
12111 m_UsableSize = VmaPrevPow2(size);
12112 m_SumFreeSize = m_UsableSize;
12116 while(m_LevelCount < MAX_LEVELS &&
12117 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
12122 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
12123 rootNode->offset = 0;
12124 rootNode->type = Node::TYPE_FREE;
12125 rootNode->parent = VMA_NULL;
12126 rootNode->buddy = VMA_NULL;
12129 AddToFreeListFront(0, rootNode);
// Full consistency check: validates the buddy tree recursively, compares
// counted allocation/free totals with the cached members, and checks that
// every per-level free list is a well-formed doubly-linked list of FREE nodes
// (and empty beyond m_LevelCount).
12132 bool VmaBlockMetadata_Buddy::Validate()
const
12135 ValidationContext ctx;
12136 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
12138 VMA_VALIDATE(
false &&
"ValidateNode failed.");
12140 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
12141 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Check free-list link integrity for the active levels.
12144 for(uint32_t level = 0; level < m_LevelCount; ++level)
12146 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
12147 m_FreeList[level].front->free.prev == VMA_NULL);
12149 for(Node* node = m_FreeList[level].front;
12151 node = node->free.next)
12153 VMA_VALIDATE(node->type == Node::TYPE_FREE);
12155 if(node->free.next == VMA_NULL)
12157 VMA_VALIDATE(m_FreeList[level].back == node);
12161 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond the configured count must stay empty.
12167 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
12169 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Largest free contiguous range = the node size of the shallowest level that
// still has a node on its free list.
12175 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const
12177 for(uint32_t level = 0; level < m_LevelCount; ++level)
12179 if(m_FreeList[level].front != VMA_NULL)
12181 return LevelToNodeSize(level);
// Fills outInfo by recursing over the buddy tree; the power-of-two rounding
// remainder (unusable size) is accounted for separately when non-zero.
12187 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
12189 const VkDeviceSize unusableSize = GetUnusableSize();
12200 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
12202 if(unusableSize > 0)
// Accumulates this block's totals into inoutStats; the unusable remainder
// (size rounded down to a power of two) counts as unused space.
12211 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const
12213 const VkDeviceSize unusableSize = GetUnusableSize();
12215 inoutStats.
size += GetSize();
12216 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
12221 if(unusableSize > 0)
12228 #if VMA_STATS_STRING_ENABLED
// Writes a JSON description of this block: overall stats, then every node of
// the buddy tree, then the trailing unusable range if any.
12230 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const
12234 CalcAllocationStatInfo(stat);
12236 PrintDetailedMap_Begin(
12242 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
12244 const VkDeviceSize unusableSize = GetUnusableSize();
12245 if(unusableSize > 0)
12247 PrintDetailedMap_UnusedRange(json,
12252 PrintDetailedMap_End(json);
// Finds a free buddy node for the request. Image-related allocation types
// round both alignment and size up to bufferImageGranularity to avoid
// granularity conflicts entirely. Searches free lists from the target level
// upward (toward larger nodes) for a node whose offset satisfies the
// alignment; the found level is stashed in customData for Alloc().
12257 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
12258 uint32_t currentFrameIndex,
12259 uint32_t frameInUseCount,
12260 VkDeviceSize bufferImageGranularity,
12261 VkDeviceSize allocSize,
12262 VkDeviceSize allocAlignment,
12264 VmaSuballocationType allocType,
12265 bool canMakeOtherLost,
12267 VmaAllocationRequest* pAllocationRequest)
12269 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Round image-like allocations up to the granularity to sidestep conflicts.
12273 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
12274 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
12275 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
12277 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
12278 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
// Cannot fit in the power-of-two usable region at all.
12281 if(allocSize > m_UsableSize)
// Walk levels from targetLevel up to 0 (larger nodes), taking the first
// suitably aligned free node.
12286 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
12287 for(uint32_t level = targetLevel + 1; level--; )
12289 for(Node* freeNode = m_FreeList[level].front;
12290 freeNode != VMA_NULL;
12291 freeNode = freeNode->free.next)
12293 if(freeNode->offset % allocAlignment == 0)
12295 pAllocationRequest->type = VmaAllocationRequestType::Normal;
12296 pAllocationRequest->offset = freeNode->offset;
12297 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
12298 pAllocationRequest->sumItemSize = 0;
12299 pAllocationRequest->itemsToMakeLostCount = 0;
// Remember which level the node was found on for Alloc().
12300 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm does not support making allocations lost: succeed only
// when nothing was requested to be made lost.
12309 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
12310 uint32_t currentFrameIndex,
12311 uint32_t frameInUseCount,
12312 VmaAllocationRequest* pAllocationRequest)
12318 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost allocations are not supported by the buddy algorithm.
// NOTE(review): the function body is not visible in this extract.
12321 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation: locates the free node chosen by
// CreateAllocationRequest (level in request.customData, matched by offset),
// splits it repeatedly until reaching the target level, then marks the final
// node as an allocation and updates the counters.
12330 void VmaBlockMetadata_Buddy::Alloc(
12331 const VmaAllocationRequest& request,
12332 VmaSuballocationType type,
12333 VkDeviceSize allocSize,
12336 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
12338 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
12339 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the node on that level's free list whose offset matches the request.
12341 Node* currNode = m_FreeList[currLevel].front;
12342 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
12343 while(currNode->offset != request.offset)
12345 currNode = currNode->free.next;
12346 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split the node level by level until it matches the allocation size.
12350 while(currLevel < targetLevel)
// The split node leaves the free list; its two children enter the next level.
12354 RemoveFromFreeList(currLevel, currNode);
12356 const uint32_t childrenLevel = currLevel + 1;
12359 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
12360 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
12362 leftChild->offset = currNode->offset;
12363 leftChild->type = Node::TYPE_FREE;
12364 leftChild->parent = currNode;
12365 leftChild->buddy = rightChild;
12367 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
12368 rightChild->type = Node::TYPE_FREE;
12369 rightChild->parent = currNode;
12370 rightChild->buddy = leftChild;
12373 currNode->type = Node::TYPE_SPLIT;
12374 currNode->split.leftChild = leftChild;
// Left child pushed last so it ends up at the front (allocated first).
12377 AddToFreeListFront(childrenLevel, rightChild);
12378 AddToFreeListFront(childrenLevel, leftChild);
12383 currNode = m_FreeList[currLevel].front;
12392 VMA_ASSERT(currLevel == targetLevel &&
12393 currNode != VMA_NULL &&
12394 currNode->type == Node::TYPE_FREE);
12395 RemoveFromFreeList(currLevel, currNode);
// Convert the node into an allocation and update the cached totals.
12398 currNode->type = Node::TYPE_ALLOCATION;
12399 currNode->allocation.alloc = hAllocation;
12401 ++m_AllocationCount;
12403 m_SumFreeSize -= allocSize;
// Recursively deletes a subtree: for a split node, first the right child
// (reached as leftChild->buddy), then the left child, then the node itself.
12406 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
12408 if(node->type == Node::TYPE_SPLIT)
12410 DeleteNode(node->split.leftChild->buddy);
12411 DeleteNode(node->split.leftChild);
12414 vma_delete(GetAllocationCallbacks(), node);
// Recursively validates one node of the buddy tree: parent/buddy links, then
// per-type invariants (FREE contributes its whole node size to the context;
// ALLOCATION contributes its slack and must hold a live handle; SPLIT recurses
// into both children, checking their offsets).
12417 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const
12419 VMA_VALIDATE(level < m_LevelCount);
12420 VMA_VALIDATE(curr->parent == parent);
// Only the root has no buddy; buddies must point at each other.
12421 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
12422 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
12425 case Node::TYPE_FREE:
12427 ctx.calculatedSumFreeSize += levelNodeSize;
12428 ++ctx.calculatedFreeCount;
12430 case Node::TYPE_ALLOCATION:
12431 ++ctx.calculatedAllocationCount;
// Slack between node size and the actual allocation counts as free.
12432 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
12433 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
12435 case Node::TYPE_SPLIT:
12437 const uint32_t childrenLevel = level + 1;
12438 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
12439 const Node*
const leftChild = curr->split.leftChild;
12440 VMA_VALIDATE(leftChild != VMA_NULL);
12441 VMA_VALIDATE(leftChild->offset == curr->offset);
12442 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
12444 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
12446 const Node*
const rightChild = leftChild->buddy;
12447 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
12448 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
12450 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest tree level whose node size still fits
// it: descends by halving from m_UsableSize while the next (smaller) level
// would still hold allocSize.
12461 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const
12464 uint32_t level = 0;
12465 VkDeviceSize currLevelNodeSize = m_UsableSize;
12466 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
12467 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
12470 currLevelNodeSize = nextLevelNodeSize;
12471 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at `offset`: walks the tree from the root, choosing the
// child that covers the offset, until reaching the allocation node. Marks it
// free, then merges with its free buddy up the tree as long as possible, and
// finally returns the resulting free node to the free list.
12476 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
12479 Node* node = m_Root;
12480 VkDeviceSize nodeOffset = 0;
12481 uint32_t level = 0;
12482 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend: left child covers [nodeOffset, nodeOffset + half), right child the rest.
12483 while(node->type == Node::TYPE_SPLIT)
12485 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
12486 if(offset < nodeOffset + nextLevelSize)
12488 node = node->split.leftChild;
12492 node = node->split.leftChild->buddy;
12493 nodeOffset += nextLevelSize;
12496 levelNodeSize = nextLevelSize;
12499 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
// alloc may be VK_NULL_HANDLE; otherwise it must match the stored handle.
12500 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
12503 --m_AllocationCount;
12504 m_SumFreeSize += alloc->GetSize();
12506 node->type = Node::TYPE_FREE;
// Coalesce: while the buddy is also free, replace both with the free parent.
12509 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
12511 RemoveFromFreeList(level, node->buddy);
12512 Node*
const parent = node->parent;
12514 vma_delete(GetAllocationCallbacks(), node->buddy);
12515 vma_delete(GetAllocationCallbacks(), node);
12516 parent->type = Node::TYPE_FREE;
12524 AddToFreeListFront(level, node);
// Recursive per-node statistics: FREE nodes count as unused ranges,
// ALLOCATION nodes count their size plus any slack as unused, SPLIT nodes
// recurse into both children with halved node size.
12527 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const
12531 case Node::TYPE_FREE:
12537 case Node::TYPE_ALLOCATION:
12539 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Slack between the power-of-two node and the actual allocation.
12545 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
12546 if(unusedRangeSize > 0)
12555 case Node::TYPE_SPLIT:
12557 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
12558 const Node*
const leftChild = node->split.leftChild;
12559 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
12560 const Node*
const rightChild = leftChild->buddy;
12561 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
// Pushes a FREE node onto the front of the given level's doubly-linked free
// list, handling the empty-list case (front == back == node).
12569 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
12571 VMA_ASSERT(node->type == Node::TYPE_FREE);
12574 Node*
const frontNode = m_FreeList[level].front;
12575 if(frontNode == VMA_NULL)
// List was empty: node becomes both front and back.
12577 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
12578 node->free.prev = node->free.next = VMA_NULL;
12579 m_FreeList[level].front = m_FreeList[level].back = node;
12583 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
12584 node->free.prev = VMA_NULL;
12585 node->free.next = frontNode;
12586 frontNode->free.prev = node;
12587 m_FreeList[level].front = node;
// Unlinks a node from the given level's doubly-linked free list, fixing up
// the list's front/back pointers when the node was at either end.
12591 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
12593 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Fix the backward link (or the list front).
12596 if(node->free.prev == VMA_NULL)
12598 VMA_ASSERT(m_FreeList[level].front == node);
12599 m_FreeList[level].front = node->free.next;
12603 Node*
const prevFreeNode = node->free.prev;
12604 VMA_ASSERT(prevFreeNode->free.next == node);
12605 prevFreeNode->free.next = node->free.next;
// Fix the forward link (or the list back).
12609 if(node->free.next == VMA_NULL)
12611 VMA_ASSERT(m_FreeList[level].back == node);
12612 m_FreeList[level].back = node->free.prev;
12616 Node*
const nextFreeNode = node->free.next;
12617 VMA_ASSERT(nextFreeNode->free.prev == node);
12618 nextFreeNode->free.prev = node->free.prev;
12622 #if VMA_STATS_STRING_ENABLED
// Recursive JSON emitter for one buddy node: FREE -> unused range; ALLOCATION
// -> allocation entry plus any trailing slack as an unused range; SPLIT ->
// recurse into both children with halved node size.
12623 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const
12627 case Node::TYPE_FREE:
12628 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
12630 case Node::TYPE_ALLOCATION:
12632 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
12633 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Emit the slack between the node size and the actual allocation.
12634 if(allocSize < levelNodeSize)
12636 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
12640 case Node::TYPE_SPLIT:
12642 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
12643 const Node*
const leftChild = node->split.leftChild;
12644 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
12645 const Node*
const rightChild = leftChild->buddy;
12646 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: leaves the block in an uninitialized state (no metadata, no
// VkDeviceMemory); real setup happens in Init().
12659 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
12660 m_pMetadata(VMA_NULL),
12661 m_MemoryTypeIndex(UINT32_MAX),
12663 m_hMemory(VK_NULL_HANDLE),
12665 m_pMappedData(VMA_NULL)
// Binds this block to an already-allocated VkDeviceMemory and creates the
// suballocation metadata object matching the requested algorithm
// (linear / buddy / generic). Must be called exactly once after construction.
12669 void VmaDeviceMemoryBlock::Init(
12672 uint32_t newMemoryTypeIndex,
12673 VkDeviceMemory newMemory,
12674 VkDeviceSize newSize,
12676 uint32_t algorithm)
12678 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
12680 m_hParentPool = hParentPool;
12681 m_MemoryTypeIndex = newMemoryTypeIndex;
12683 m_hMemory = newMemory;
// Algorithm dispatch (switch dropped by extraction): linear, buddy,
// or the default generic metadata.
12688 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
12691 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
12697 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
12699 m_pMetadata->Init(newSize);
// Frees the underlying VkDeviceMemory and deletes the metadata object.
// Requires all suballocations to have been freed first.
12702 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
12706 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
12708 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
12709 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
// Null out handles so a double Destroy() would trip the asserts above.
12710 m_hMemory = VK_NULL_HANDLE;
12712 vma_delete(allocator, m_pMetadata);
12713 m_pMetadata = VMA_NULL;
// Sanity-checks the block handle and size, then delegates to the metadata
// validator. Returns true when the block is consistent.
12716 bool VmaDeviceMemoryBlock::Validate()
const
12718 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
12719 (m_pMetadata->GetSize() != 0));
12721 return m_pMetadata->Validate();
// Temporarily maps the block and asks the metadata to verify the corruption-
// detection magic values around allocations. Returns the metadata's result.
12724 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
12726 void* pData =
nullptr;
12727 VkResult res = Map(hAllocator, 1, &pData);
12728 if(res != VK_SUCCESS)
12733 res = m_pMetadata->CheckCorruption(pData);
// Balance the Map() above regardless of the check's outcome.
12735 Unmap(hAllocator, 1);
// Reference-counted map of the block's memory. If already mapped, just bumps
// m_MapCount by `count` and returns the cached pointer; otherwise calls
// vkMapMemory under the block mutex. `ppData` may be null.
12740 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
12747 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12748 if(m_MapCount != 0)
12750 m_MapCount += count;
12751 VMA_ASSERT(m_pMappedData != VMA_NULL);
12752 if(ppData != VMA_NULL)
12754 *ppData = m_pMappedData;
// First map: call into Vulkan (remaining vkMapMemory arguments were
// dropped by the extraction).
12760 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
12761 hAllocator->m_hDevice,
12767 if(result == VK_SUCCESS)
12769 if(ppData != VMA_NULL)
12771 *ppData = m_pMappedData;
12773 m_MapCount = count;
// Reference-counted unmap: decrements m_MapCount by `count` and calls
// vkUnmapMemory only when the count reaches zero. Asserts on underflow.
12779 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
12786 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12787 if(m_MapCount >= count)
12789 m_MapCount -= count;
12790 if(m_MapCount == 0)
12792 m_pMappedData = VMA_NULL;
12793 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
12798 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Writes the corruption-detection magic value into the debug margins directly
// before and after an allocation. Requires VMA_DEBUG_MARGIN/_DETECT_CORRUPTION
// to be enabled. Maps and unmaps the block around the writes.
12802 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12804 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12805 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12808 VkResult res = Map(hAllocator, 1, &pData);
12809 if(res != VK_SUCCESS)
// One magic word before the allocation, one right after it.
12814 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
12815 VmaWriteMagicValue(pData, allocOffset + allocSize);
12817 Unmap(hAllocator, 1);
// Counterpart of WriteMagicValueAroundAllocation: re-reads both margin words
// and asserts if either was overwritten (heap corruption before/after the
// allocation being freed).
12822 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12824 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12825 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12828 VkResult res = Map(hAllocator, 1, &pData);
12829 if(res != VK_SUCCESS)
12834 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
12836 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
12838 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
12840 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
12843 Unmap(hAllocator, 1);
// Binds a VkBuffer at (allocation offset + allocationLocalOffset) within this
// block's memory, under the block mutex to serialize vkBindBufferMemory calls
// on the same VkDeviceMemory.
12848 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
12851 VkDeviceSize allocationLocalOffset,
12855 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12856 hAllocation->GetBlock() ==
this);
12857 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12858 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12859 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12861 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12862 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
// Image analog of BindBufferMemory: binds a VkImage at the allocation's
// absolute offset plus allocationLocalOffset, serialized by the block mutex.
12865 VkResult VmaDeviceMemoryBlock::BindImageMemory(
12868 VkDeviceSize allocationLocalOffset,
12872 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12873 hAllocation->GetBlock() ==
this);
12874 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12875 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12876 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12878 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12879 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
12884 memset(&outInfo, 0,
sizeof(outInfo));
// Post-processes an accumulated VmaStatInfo (body dropped by the extraction —
// presumably computes derived averages; confirm against upstream source).
12903 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the user's VmaPoolCreateInfo fields to the
// embedded VmaBlockVector. A zero blockSize means "use the allocator's
// preferred block size"; a nonzero one also marks the size as explicit.
12911 VmaPool_T::VmaPool_T(
12914 VkDeviceSize preferredBlockSize) :
12918 createInfo.memoryTypeIndex,
12919 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
12920 createInfo.minBlockCount,
12921 createInfo.maxBlockCount,
12923 createInfo.frameInUseCount,
12924 createInfo.blockSize != 0,
12926 createInfo.priority,
// Effective alignment is at least the memory type's minimum alignment.
12927 VMA_MAX(hAllocator->GetMemoryTypeMinAlignment(createInfo.memoryTypeIndex), createInfo.minAllocationAlignment),
12928 createInfo.pMemoryAllocateNext),
// Destructor: the pool must already be unlinked from the allocator's
// intrusive pool list.
12934 VmaPool_T::~VmaPool_T()
12936 VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL);
// Replaces the pool's debug name: frees the old copy and duplicates the new
// string with the allocator's allocation callbacks (null clears the name).
12939 void VmaPool_T::SetName(
const char* pName)
12941 const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
12942 VmaFreeString(allocs, m_Name);
12944 if(pName != VMA_NULL)
12946 m_Name = VmaCreateStringCopy(allocs, pName);
12954 #if VMA_STATS_STRING_ENABLED
// Block-vector constructor: pure member initialization, no Vulkan calls.
// Holds the configuration for one memory type's list of VkDeviceMemory blocks.
12958 VmaBlockVector::VmaBlockVector(
12961 uint32_t memoryTypeIndex,
12962 VkDeviceSize preferredBlockSize,
12963 size_t minBlockCount,
12964 size_t maxBlockCount,
12965 VkDeviceSize bufferImageGranularity,
12966 uint32_t frameInUseCount,
12967 bool explicitBlockSize,
12968 uint32_t algorithm,
12970 VkDeviceSize minAllocationAlignment,
12971 void* pMemoryAllocateNext) :
12972 m_hAllocator(hAllocator),
12973 m_hParentPool(hParentPool),
12974 m_MemoryTypeIndex(memoryTypeIndex),
12975 m_PreferredBlockSize(preferredBlockSize),
12976 m_MinBlockCount(minBlockCount),
12977 m_MaxBlockCount(maxBlockCount),
12978 m_BufferImageGranularity(bufferImageGranularity),
12979 m_FrameInUseCount(frameInUseCount),
12980 m_ExplicitBlockSize(explicitBlockSize),
12981 m_Algorithm(algorithm),
12982 m_Priority(priority),
12983 m_MinAllocationAlignment(minAllocationAlignment),
12984 m_pMemoryAllocateNext(pMemoryAllocateNext),
12985 m_HasEmptyBlock(false),
12986 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
// Destructor: destroys and deletes every remaining block, iterating in
// reverse so vector shrinkage is cheap.
12991 VmaBlockVector::~VmaBlockVector()
12993 for(
size_t i = m_Blocks.size(); i--; )
12995 m_Blocks[i]->Destroy(m_hAllocator);
12996 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-allocates m_MinBlockCount blocks of the preferred size, bailing out on
// the first failure.
13000 VkResult VmaBlockVector::CreateMinBlocks()
13002 for(
size_t i = 0; i < m_MinBlockCount; ++i)
13004 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
13005 if(res != VK_SUCCESS)
// Accumulates pool statistics over all blocks under a shared (read) lock.
13013 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
13015 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13017 const size_t blockCount = m_Blocks.size();
13026 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13028 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
13029 VMA_ASSERT(pBlock);
13030 VMA_HEAVY_ASSERT(pBlock->Validate());
13031 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Returns true when the vector currently owns no blocks (read lock held).
13035 bool VmaBlockVector::IsEmpty()
13037 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13038 return m_Blocks.empty();
// Corruption detection requires the compile-time switches plus a memory type
// that is both HOST_VISIBLE and HOST_COHERENT (so margins can be read/written
// via mapping without explicit flushes).
13041 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const
13043 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13044 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
13045 (VMA_DEBUG_MARGIN > 0) &&
13047 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Max retries of the make-allocations-lost loop in AllocatePage before
// giving up with VK_ERROR_TOO_MANY_OBJECTS.
13050 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates `allocationCount` suballocations by calling AllocatePage in a
// loop under the write lock. On any failure, rolls back: frees the
// already-created allocations (details dropped by extraction), subtracts them
// from the heap budget, and zeroes the output array.
13052 VkResult VmaBlockVector::Allocate(
13053 uint32_t currentFrameIndex,
13055 VkDeviceSize alignment,
13057 VmaSuballocationType suballocType,
13058 size_t allocationCount,
13062 VkResult res = VK_SUCCESS;
// Enforce the vector-wide minimum alignment.
13064 alignment = VMA_MAX(alignment, m_MinAllocationAlignment);
// Round size/alignment up so margin magic values stay word-aligned.
13066 if(IsCorruptionDetectionEnabled())
13068 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
13069 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
13073 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13074 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13076 res = AllocatePage(
13082 pAllocations + allocIndex);
13083 if(res != VK_SUCCESS)
// Rollback path: undo every allocation made before the failure.
13090 if(res != VK_SUCCESS)
13093 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13094 while(allocIndex--)
13096 VmaAllocation_T*
const alloc = pAllocations[allocIndex];
13097 const VkDeviceSize allocSize = alloc->GetSize();
13099 m_hAllocator->m_Budget.RemoveAllocation(heapIndex, allocSize);
13101 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Core single-allocation strategy. In order:
//   1. Early rejections (upper-address with wrong algorithm, oversize request,
//      budget checks).
//   2. Try existing blocks (last block first, then forward or backward
//      depending on allocation strategy).
//   3. Create a new block, shrinking its size by halves (up to 3 shifts) if
//      the full preferred size cannot be allocated.
//   4. If allowed, repeatedly try to make other (lost-able) allocations lost
//      to carve out space, up to VMA_ALLOCATION_TRY_COUNT attempts.
// Note: many lines (signature tail, some branches, braces) were dropped by
// the extraction; code tokens are verbatim.
13107 VkResult VmaBlockVector::AllocatePage(
13108 uint32_t currentFrameIndex,
13110 VkDeviceSize alignment,
13112 VmaSuballocationType suballocType,
13120 VkDeviceSize freeMemory;
13122 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13124 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
// Custom pools cannot fall back to dedicated allocations.
13128 const bool canFallbackToDedicated = !IsCustomPool();
13129 const bool canCreateNewBlock =
13131 (m_Blocks.size() < m_MaxBlockCount) &&
13132 (freeMemory >= size || !canFallbackToDedicated);
13139 canMakeOtherLost =
false;
// Upper-address allocation is only meaningful for the linear algorithm.
13143 if(isUpperAddress &&
13146 return VK_ERROR_FEATURE_NOT_PRESENT;
13160 return VK_ERROR_FEATURE_NOT_PRESENT;
// Request (plus both debug margins) larger than a whole block can never fit.
13164 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
13166 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13174 if(!canMakeOtherLost || canCreateNewBlock)
// Fast path: the most recently created block is the most likely to have room.
13183 if(!m_Blocks.empty())
13185 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
13186 VMA_ASSERT(pCurrBlock);
13187 VkResult res = AllocateFromBlock(
13197 if(res == VK_SUCCESS)
13199 VMA_DEBUG_LOG(
" Returned from last block #%u", pCurrBlock->GetId());
// Forward scan over existing blocks (strategy-dependent; see also the
// reverse scan below).
13209 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13211 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13212 VMA_ASSERT(pCurrBlock);
13213 VkResult res = AllocateFromBlock(
13223 if(res == VK_SUCCESS)
13225 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
// Reverse scan variant.
13233 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13235 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13236 VMA_ASSERT(pCurrBlock);
13237 VkResult res = AllocateFromBlock(
13247 if(res == VK_SUCCESS)
13249 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
// No existing block had room: create a new one.
13257 if(canCreateNewBlock)
13260 VkDeviceSize newBlockSize = m_PreferredBlockSize;
13261 uint32_t newBlockSizeShift = 0;
13262 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic: start smaller than the preferred size if existing blocks are
// small and the request fits with room to spare.
13264 if(!m_ExplicitBlockSize)
13267 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
13268 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
13270 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
13271 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
13273 newBlockSize = smallerNewBlockSize;
13274 ++newBlockSizeShift;
13283 size_t newBlockIndex = 0;
13284 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
13285 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
// On failure, retry with progressively halved block sizes.
13287 if(!m_ExplicitBlockSize)
13289 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
13291 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
13292 if(smallerNewBlockSize >= size)
13294 newBlockSize = smallerNewBlockSize;
13295 ++newBlockSizeShift;
13296 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
13297 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
13306 if(res == VK_SUCCESS)
13308 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
13309 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
13311 res = AllocateFromBlock(
13321 if(res == VK_SUCCESS)
13323 VMA_DEBUG_LOG(
" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
13329 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Last resort: make other allocations lost to free up space, retrying a
// bounded number of times.
13336 if(canMakeOtherLost)
13338 uint32_t tryIndex = 0;
13339 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
13341 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
13342 VmaAllocationRequest bestRequest = {};
13343 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan: find the candidate request with the lowest cost
// (cost == bytes of allocations that would have to be made lost).
13349 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
13351 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13352 VMA_ASSERT(pCurrBlock);
13353 VmaAllocationRequest currRequest = {};
13354 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13357 m_BufferImageGranularity,
13366 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13367 if(pBestRequestBlock == VMA_NULL ||
13368 currRequestCost < bestRequestCost)
13370 pBestRequestBlock = pCurrBlock;
13371 bestRequest = currRequest;
13372 bestRequestCost = currRequestCost;
// Zero cost cannot be beaten — stop scanning.
13374 if(bestRequestCost == 0)
// Reverse scan variant of the same search.
13385 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13387 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
13388 VMA_ASSERT(pCurrBlock);
13389 VmaAllocationRequest currRequest = {};
13390 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
13393 m_BufferImageGranularity,
13402 const VkDeviceSize currRequestCost = currRequest.CalcCost();
13403 if(pBestRequestBlock == VMA_NULL ||
13404 currRequestCost < bestRequestCost ||
13407 pBestRequestBlock = pCurrBlock;
13408 bestRequest = currRequest;
13409 bestRequestCost = currRequestCost;
13411 if(bestRequestCost == 0 ||
// Commit the best candidate: map if persistently-mapped, make the victim
// allocations lost, then perform the allocation.
13421 if(pBestRequestBlock != VMA_NULL)
13425 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
13426 if(res != VK_SUCCESS)
13432 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
13438 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13439 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
13440 UpdateHasEmptyBlock();
13441 (*pAllocation)->InitBlockAllocation(
13443 bestRequest.offset,
13450 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
13451 VMA_DEBUG_LOG(
" Returned from existing block");
13452 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
13453 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13454 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13456 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
13458 if(IsCorruptionDetectionEnabled())
13460 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
13461 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// All retries exhausted while other threads kept stealing the space.
13476 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
13478 return VK_ERROR_TOO_MANY_OBJECTS;
13482 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one suballocation: validates corruption margins, unmaps persistent
// maps, releases the metadata range, and may destroy a now-empty block — but
// always keeps at least one empty block around (and never drops below
// m_MinBlockCount) unless the heap budget is exceeded. The actual
// Destroy() happens after the lock is released.
13485 void VmaBlockVector::Free(
13488 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
13490 bool budgetExceeded =
false;
13492 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
13494 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
13495 budgetExceeded = heapBudget.
usage >= heapBudget.
budget;
// Scope of the write lock (brace dropped by extraction).
13500 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13502 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13504 if(IsCorruptionDetectionEnabled())
13506 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
13507 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
13510 if(hAllocation->IsPersistentMap())
13512 pBlock->Unmap(m_hAllocator, 1);
13515 pBlock->m_pMetadata->Free(hAllocation);
13516 VMA_HEAVY_ASSERT(pBlock->Validate());
13518 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
13520 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
// If this block became empty: delete it only when another empty block
// already exists or the budget is exceeded (keep one empty block cached).
13522 if(pBlock->m_pMetadata->IsEmpty())
13525 if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
13527 pBlockToDelete = pBlock;
// Otherwise, if there was already an empty block at the end, delete that one.
13534 else if(m_HasEmptyBlock && canDeleteBlock)
13536 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
13537 if(pLastBlock->m_pMetadata->IsEmpty())
13539 pBlockToDelete = pLastBlock;
13540 m_Blocks.pop_back();
13544 UpdateHasEmptyBlock();
13545 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is deliberately done outside the lock.
13550 if(pBlockToDelete != VMA_NULL)
13552 VMA_DEBUG_LOG(
" Deleted empty block");
13553 pBlockToDelete->Destroy(m_hAllocator);
13554 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, short-circuiting once the
// preferred block size is reached (nothing larger is ever created).
13558 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const
13560 VkDeviceSize result = 0;
13561 for(
size_t i = m_Blocks.size(); i--; )
13563 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
13564 if(result >= m_PreferredBlockSize)
// Removes the given block pointer from m_Blocks (linear search).
13572 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
13574 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13576 if(m_Blocks[blockIndex] == pBlock)
13578 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass moving blocks with more free space toward the end,
// so the forward allocation scan prefers fuller blocks.
13585 void VmaBlockVector::IncrementallySortBlocks()
13590 for(
size_t i = 1; i < m_Blocks.size(); ++i)
13592 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
13594 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i])
// Attempts the allocation inside one specific block: asks the metadata for a
// request (no make-lost allowed here), maps if persistently mapped, creates
// the VmaAllocation object, records budget, and optionally fills / writes
// corruption margins. Returns VK_ERROR_OUT_OF_DEVICE_MEMORY if the block has
// no suitable free range.
13601 VkResult VmaBlockVector::AllocateFromBlock(
13602 VmaDeviceMemoryBlock* pBlock,
13603 uint32_t currentFrameIndex,
13605 VkDeviceSize alignment,
13608 VmaSuballocationType suballocType,
13617 VmaAllocationRequest currRequest = {};
13618 if(pBlock->m_pMetadata->CreateAllocationRequest(
13621 m_BufferImageGranularity,
// This path never makes other allocations lost.
13631 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
13635 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
13636 if(res != VK_SUCCESS)
13642 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
13643 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
13644 UpdateHasEmptyBlock();
13645 (*pAllocation)->InitBlockAllocation(
13647 currRequest.offset,
13654 VMA_HEAVY_ASSERT(pBlock->Validate());
13655 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
13656 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
13657 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
13659 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
13661 if(IsCorruptionDetectionEnabled())
13663 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
13664 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
13668 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a fresh VkDeviceMemory of `blockSize`, chaining optional pNext
// structures (buffer device address flags, EXT_memory_priority, external
// memory export) onto VkMemoryAllocateInfo, wraps it in a new
// VmaDeviceMemoryBlock and appends it to m_Blocks. Optionally returns the
// new block's index.
13671 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
13673 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
13674 allocInfo.pNext = m_pMemoryAllocateNext;
13675 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
13676 allocInfo.allocationSize = blockSize;
13678 #if VMA_BUFFER_DEVICE_ADDRESS
13680 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
13681 if(m_hAllocator->m_UseKhrBufferDeviceAddress)
13683 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
13684 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
13688 #if VMA_MEMORY_PRIORITY
13689 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
13690 if(m_hAllocator->m_UseExtMemoryPriority)
13692 priorityInfo.priority = m_Priority;
13693 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
13697 #if VMA_EXTERNAL_MEMORY
13699 VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR };
13700 exportMemoryAllocInfo.handleTypes = m_hAllocator->GetExternalMemoryHandleTypeFlags(m_MemoryTypeIndex);
13701 if(exportMemoryAllocInfo.handleTypes != 0)
13703 VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo);
13707 VkDeviceMemory mem = VK_NULL_HANDLE;
13708 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Wrap the raw memory in a block object (Init argument lines partially
// dropped by the extraction).
13717 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
13723 allocInfo.allocationSize,
13727 m_Blocks.push_back(pBlock);
13728 if(pNewBlockIndex != VMA_NULL)
13730 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU via memcpy: maps every block that
// participates in a move, invalidates the source range and flushes the
// destination range when the memory type is non-coherent, copies the bytes,
// then unmaps the blocks this function mapped itself.
13736 void VmaBlockVector::ApplyDefragmentationMovesCpu(
13737 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13738 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
13740 const size_t blockCount = m_Blocks.size();
13741 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
13745 BLOCK_FLAG_USED = 0x00000001,
13746 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
// Per-block bookkeeping: participation flag + mapped pointer.
13754 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
13755 blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
13756 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Mark every block referenced by any move as used.
13759 const size_t moveCount = moves.size();
13760 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13762 const VmaDefragmentationMove& move = moves[moveIndex];
13763 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
13764 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
13767 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Map used blocks; remember which ones we mapped so we can unmap them later.
13770 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13772 BlockInfo& currBlockInfo = blockInfo[blockIndex];
13773 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13774 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
13776 currBlockInfo.pMappedData = pBlock->GetMappedData();
// Not persistently mapped — map it ourselves.
13778 if(currBlockInfo.pMappedData == VMA_NULL)
13780 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
13781 if(pDefragCtx->res == VK_SUCCESS)
13783 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Perform the actual copies.
13790 if(pDefragCtx->res == VK_SUCCESS)
13792 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13793 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13795 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13797 const VmaDefragmentationMove& move = moves[moveIndex];
13799 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
13800 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
13802 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Non-coherent memory: invalidate the source range (aligned to
// nonCoherentAtomSize and clamped to the block size) before reading.
13807 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
13808 memRange.memory = pSrcBlock->GetDeviceMemory();
13809 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
13810 memRange.size = VMA_MIN(
13811 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
13812 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
13813 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
13818 reinterpret_cast<char*
>(dstBlockInfo.pMappedData) + move.dstOffset,
13819 reinterpret_cast<char*
>(srcBlockInfo.pMappedData) + move.srcOffset,
13820 static_cast<size_t>(move.size));
13822 if(IsCorruptionDetectionEnabled())
13824 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
13825 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Non-coherent memory: flush the written destination range.
13831 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
13832 memRange.memory = pDstBlock->GetDeviceMemory();
13833 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
13834 memRange.size = VMA_MIN(
13835 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
13836 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
13837 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Unmap only the blocks this function mapped (not persistent maps).
13844 for(
size_t blockIndex = blockCount; blockIndex--; )
13846 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
13847 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
13849 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13850 pBlock->Unmap(m_hAllocator, 1);
// Executes defragmentation moves on the GPU: creates a temporary VkBuffer
// bound over each participating block's full memory, then records
// vkCmdCopyBuffer commands into the provided command buffer. Leaves the
// context in VK_NOT_READY until the command buffer has executed.
13855 void VmaBlockVector::ApplyDefragmentationMovesGpu(
13856 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13857 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13858 VkCommandBuffer commandBuffer)
13860 const size_t blockCount = m_Blocks.size();
13862 pDefragCtx->blockContexts.resize(blockCount);
13863 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Flag blocks that participate in at least one move.
13866 const size_t moveCount = moves.size();
13867 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13869 const VmaDefragmentationMove& move = moves[moveIndex];
13874 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13875 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13879 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Create a transfer buffer spanning each used block and bind it at offset 0.
13883 VkBufferCreateInfo bufCreateInfo;
13884 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
13886 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13888 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
13889 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13890 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
13892 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
13893 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
13894 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
13895 if(pDefragCtx->res == VK_SUCCESS)
13897 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
13898 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Record one copy region per move.
13905 if(pDefragCtx->res == VK_SUCCESS)
13907 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13909 const VmaDefragmentationMove& move = moves[moveIndex];
13911 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
13912 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
13914 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
13916 VkBufferCopy region = {
// NOTE(review): "®ion" below is mojibake for "&region" — confirm
// against the upstream source before compiling.
13920 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
13921 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
// Moves were only recorded, not executed yet.
13926 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
13928 pDefragCtx->res = VK_NOT_READY;
// NOTE(review): the enclosing function signature was dropped by the
// extraction — by context this is the body of VmaBlockVector::FreeEmptyBlocks
// (confirm against upstream). Destroys empty blocks (reverse iteration) while
// respecting m_MinBlockCount, crediting freed bytes to the defrag stats.
13934 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
13936 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13937 if(pBlock->m_pMetadata->IsEmpty())
13939 if(m_Blocks.size() > m_MinBlockCount)
13941 if(pDefragmentationStats != VMA_NULL)
13944 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
13947 VmaVectorRemove(m_Blocks, blockIndex);
13948 pBlock->Destroy(m_hAllocator);
13949 vma_delete(m_hAllocator, pBlock);
13957 UpdateHasEmptyBlock();
// Recomputes the cached m_HasEmptyBlock flag by scanning all blocks.
13960 void VmaBlockVector::UpdateHasEmptyBlock()
13962 m_HasEmptyBlock =
false;
13963 for(
size_t index = 0, count = m_Blocks.size(); index < count; ++index)
13965 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[index];
13966 if(pBlock->m_pMetadata->IsEmpty())
13968 m_HasEmptyBlock =
true;
13974 #if VMA_STATS_STRING_ENABLED
// Serializes the block vector's configuration and every block's detailed map
// to JSON, under a read lock. Pool-owned vectors emit Name/MemoryTypeIndex/
// BlockSize/BlockCount (the branch structure was partially dropped by the
// extraction); the default path emits PreferredBlockSize.
13976 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
13978 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13980 json.BeginObject();
13984 const char* poolName = m_hParentPool->GetName();
13985 if(poolName != VMA_NULL && poolName[0] !=
'\0')
13987 json.WriteString(
"Name");
13988 json.WriteString(poolName);
13991 json.WriteString(
"MemoryTypeIndex");
13992 json.WriteNumber(m_MemoryTypeIndex);
13994 json.WriteString(
"BlockSize");
13995 json.WriteNumber(m_PreferredBlockSize);
13997 json.WriteString(
"BlockCount");
13998 json.BeginObject(
true);
13999 if(m_MinBlockCount > 0)
14001 json.WriteString(
"Min");
14002 json.WriteNumber((uint64_t)m_MinBlockCount);
14004 if(m_MaxBlockCount < SIZE_MAX)
14006 json.WriteString(
"Max");
14007 json.WriteNumber((uint64_t)m_MaxBlockCount);
14009 json.WriteString(
"Cur");
14010 json.WriteNumber((uint64_t)m_Blocks.size());
14013 if(m_FrameInUseCount > 0)
14015 json.WriteString(
"FrameInUseCount");
14016 json.WriteNumber(m_FrameInUseCount);
14019 if(m_Algorithm != 0)
14021 json.WriteString(
"Algorithm");
14022 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
14027 json.WriteString(
"PreferredBlockSize");
14028 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps keyed by block id.
14031 json.WriteString(
"Blocks");
14032 json.BeginObject();
14033 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14035 json.BeginString();
14036 json.ContinueString(m_Blocks[i]->GetId());
14039 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation pass over this vector. Chooses CPU vs GPU path
// based on budgets, host visibility, and the allocator's GPU-defrag memory
// type mask; acquires the write lock (try-lock for incremental mode),
// computes moves via the context's algorithm, updates remaining budgets, and
// applies moves immediately unless incremental processing was requested.
14048 void VmaBlockVector::Defragment(
14049 class VmaBlockVectorDefragmentationContext* pCtx,
14051 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
14052 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
14053 VkCommandBuffer commandBuffer)
14055 pCtx->res = VK_SUCCESS;
14057 const VkMemoryPropertyFlags memPropFlags =
14058 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
14059 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
14061 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
// GPU path excluded when corruption detection is on (margins live in
// host-written memory) or the memory type is not in the GPU-defrag mask.
14063 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
14064 !IsCorruptionDetectionEnabled() &&
14065 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
14068 if(canDefragmentOnCpu || canDefragmentOnGpu)
14070 bool defragmentOnGpu;
// Exactly one path available: take it.
14072 if(canDefragmentOnGpu != canDefragmentOnCpu)
14074 defragmentOnGpu = canDefragmentOnGpu;
// Both available: prefer GPU for device-local memory or integrated GPUs.
14079 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
14080 m_hAllocator->IsIntegratedGpu();
14083 bool overlappingMoveSupported = !defragmentOnGpu;
14085 if(m_hAllocator->m_UseMutex)
// Incremental mode must not block: fail fast if the lock is contended.
14089 if(!m_Mutex.TryLockWrite())
14091 pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
14097 m_Mutex.LockWrite();
14098 pCtx->mutexLocked =
true;
14102 pCtx->Begin(overlappingMoveSupported, flags);
14106 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
14107 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
14108 pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);
// Charge the consumed budget against the caller's remaining limits.
14111 if(pStats != VMA_NULL)
14113 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
14114 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
14117 VMA_ASSERT(bytesMoved <= maxBytesToMove);
14118 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
14119 if(defragmentOnGpu)
14121 maxGpuBytesToMove -= bytesMoved;
14122 maxGpuAllocationsToMove -= allocationsMoved;
14126 maxCpuBytesToMove -= bytesMoved;
14127 maxCpuAllocationsToMove -= allocationsMoved;
// Incremental mode: release the lock and report pending moves.
14133 if(m_hAllocator->m_UseMutex)
14134 m_Mutex.UnlockWrite();
14136 if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
14137 pCtx->res = VK_NOT_READY;
// Non-incremental mode: apply moves right away.
14142 if(pCtx->res >= VK_SUCCESS)
14144 if(defragmentOnGpu)
14146 ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
14150 ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
// Finishes a defragmentation pass: (re)acquires the write lock if needed,
// destroys the temporary GPU-copy buffers, frees blocks emptied by the moves,
// and releases the lock held since Defragment().
14156 void VmaBlockVector::DefragmentationEnd(
14157 class VmaBlockVectorDefragmentationContext* pCtx,
14163 VMA_ASSERT(pCtx->mutexLocked ==
false);
14167 m_Mutex.LockWrite();
14168 pCtx->mutexLocked =
true;
14172 if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
// Destroy temporary buffers created by ApplyDefragmentationMovesGpu.
14175 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
14177 VmaBlockDefragmentationContext &blockCtx = pCtx->blockContexts[blockIndex];
14178 if(blockCtx.hBuffer)
14180 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
14184 if(pCtx->res >= VK_SUCCESS)
14186 FreeEmptyBlocks(pStats);
14190 if(pCtx->mutexLocked)
14192 VMA_ASSERT(m_hAllocator->m_UseMutex);
14193 m_Mutex.UnlockWrite();
// Incremental defragmentation: hands the caller up to `maxMoves` pending
// moves (destination memory + offset) and advances the processed counter.
// Returns the number of moves written.
14197 uint32_t VmaBlockVector::ProcessDefragmentations(
14198 class VmaBlockVectorDefragmentationContext *pCtx,
14201 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14203 const uint32_t moveCount = VMA_MIN(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);
14205 for(uint32_t i = 0; i < moveCount; ++ i)
14207 VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];
14210 pMove->
memory = move.pDstBlock->GetDeviceMemory();
14211 pMove->
offset = move.dstOffset;
14216 pCtx->defragmentationMovesProcessed += moveCount;
// Commits all processed-but-uncommitted moves: frees each source range and
// repoints the allocation at its destination block/offset, then frees any
// blocks that became empty.
14221 void VmaBlockVector::CommitDefragmentations(
14222 class VmaBlockVectorDefragmentationContext *pCtx,
14225 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14227 for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
14229 const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];
14231 move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
14232 move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
14235 pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
14236 FreeEmptyBlocks(pStats);
// Sums the allocation counts of all blocks.
14239 size_t VmaBlockVector::CalcAllocationCount()
const
14242 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14244 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Returns whether any block could suffer a bufferImageGranularity conflict
// (linear vs optimal resources sharing a granularity page). Trivially false
// when granularity is 1. Only valid for the generic algorithm; the
// lastSuballocType carries over across blocks.
14249 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const
14251 if(m_BufferImageGranularity == 1)
14255 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
14256 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
14258 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
14259 VMA_ASSERT(m_Algorithm == 0);
14260 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
14261 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks allocations in every block as lost based on currentFrameIndex and
// m_FrameInUseCount; optionally reports the total via pLostAllocationCount.
14269 void VmaBlockVector::MakePoolAllocationsLost(
14270 uint32_t currentFrameIndex,
14271 size_t* pLostAllocationCount)
14273 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
14274 size_t lostAllocationCount = 0;
14275 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14277 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14278 VMA_ASSERT(pBlock);
14279 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Out-parameter is optional.
14281 if(pLostAllocationCount != VMA_NULL)
14283 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection margins in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when detection is disabled; otherwise the
// first failing block's result (propagation lines elided by extraction).
14287 VkResult VmaBlockVector::CheckCorruption()
14289 if(!IsCorruptionDetectionEnabled())
14291 return VK_ERROR_FEATURE_NOT_PRESENT;
// Read lock suffices: checking does not mutate block state.
14294 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
14295 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14297 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14298 VMA_ASSERT(pBlock);
14299 VkResult res = pBlock->CheckCorruption(m_hAllocator);
14300 if(res != VK_SUCCESS)
// Accumulates per-block statistics into the global total, the owning memory
// type's entry, and the owning heap's entry of pStats.
14308 void VmaBlockVector::AddStats(
VmaStats* pStats)
14310 const uint32_t memTypeIndex = m_MemoryTypeIndex;
14311 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
14313 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
14315 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
14317 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
14318 VMA_ASSERT(pBlock);
14319 VMA_HEAVY_ASSERT(pBlock->Validate());
// allocationStatInfo is computed per block, then folded into all three views.
14321 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
14322 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14323 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14324 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: snapshots every block of the target vector into a BlockInfo
// list (remembering each block's original index), then sorts the list by
// block pointer so AddAllocation can binary-search it.
14331 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
14333 VmaBlockVector* pBlockVector,
14334 uint32_t currentFrameIndex,
14335 bool overlappingMoveSupported) :
14336 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14337 m_AllocationCount(0),
14338 m_AllAllocations(false),
14340 m_AllocationsMoved(0),
14341 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
14344 const size_t blockCount = m_pBlockVector->m_Blocks.size();
14345 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14347 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
14348 pBlockInfo->m_OriginalBlockIndex = blockIndex;
14349 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
14350 m_Blocks.push_back(pBlockInfo);
// Sorted by raw block pointer — precondition for the binary search in AddAllocation.
14354 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Destructor: frees the owned BlockInfo objects (reverse order).
14357 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
14359 for(
size_t i = m_Blocks.size(); i--; )
14361 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers a single allocation as a defragmentation candidate. Lost
// allocations are skipped; otherwise the owning block is located by binary
// search (m_Blocks is sorted by pointer) and the allocation appended to it.
// pChanged, if non-null, will be set when this allocation is later moved.
14365 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
14368 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
14370 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
14371 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
14372 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
14374 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
14375 (*it)->m_Allocations.push_back(allocInfo);
14382 ++m_AllocationCount;
// One round of the generic algorithm: walks source allocations from the last
// block backwards and tries to re-place each into an earlier block (or earlier
// offset) honoring maxBytesToMove / maxAllocationsToMove budgets. Records each
// accepted relocation in `moves`; when freeOldAllocations is set it also
// commits the move immediately (free at src, rebind to dst).
// NOTE(review): many structural lines (braces, returns, some arguments of
// CreateAllocationRequest/Alloc) were elided by extraction — the control-flow
// comments below describe the surviving lines only.
14386 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
14387 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14388 VkDeviceSize maxBytesToMove,
14389 uint32_t maxAllocationsToMove,
14390 bool freeOldAllocations)
14392 if(m_Blocks.empty())
14405 size_t srcBlockMinIndex = 0;
// Iterate sources from the last (most fragmented end) block backwards.
14418 size_t srcBlockIndex = m_Blocks.size() - 1;
14419 size_t srcAllocIndex = SIZE_MAX;
// SIZE_MAX sentinel forces re-seeding of srcAllocIndex from the current block.
14425 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
14427 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
14430 if(srcBlockIndex == srcBlockMinIndex)
14437 srcAllocIndex = SIZE_MAX;
14442 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
14446 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
14447 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
14449 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
14450 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
14451 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
14452 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destinations from the first block up to (and including) the source block.
14455 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
14457 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
14458 VmaAllocationRequest dstAllocRequest;
14459 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
14460 m_CurrentFrameIndex,
14461 m_pBlockVector->GetFrameInUseCount(),
14462 m_pBlockVector->GetBufferImageGranularity(),
14469 &dstAllocRequest) &&
14471 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
14473 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Respect caller-imposed budgets before accepting the move.
14476 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
14477 (m_BytesMoved + size > maxBytesToMove))
14482 VmaDefragmentationMove move = {};
14483 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
14484 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
14485 move.srcOffset = srcOffset;
14486 move.dstOffset = dstAllocRequest.offset;
14488 move.hAllocation = allocInfo.m_hAllocation;
14489 move.pSrcBlock = pSrcBlockInfo->m_pBlock;
14490 move.pDstBlock = pDstBlockInfo->m_pBlock;
14492 moves.push_back(move);
// Reserve the destination range in metadata right away.
14494 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
14498 allocInfo.m_hAllocation);
14500 if(freeOldAllocations)
14502 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
14503 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
14506 if(allocInfo.m_pChanged != VMA_NULL)
14508 *allocInfo.m_pChanged = VK_TRUE;
14511 ++m_AllocationsMoved;
14512 m_BytesMoved += size;
14514 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the next candidate (previous allocation / previous block).
14522 if(srcAllocIndex > 0)
14528 if(srcBlockIndex > 0)
14531 srcAllocIndex = SIZE_MAX;
// Counts blocks flagged as containing at least one non-movable allocation.
// NOTE(review): accumulator declaration/increment and return were elided.
14541 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const
14544 for(
size_t i = 0; i < m_Blocks.size(); ++i)
14546 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm: optionally gathers every allocation
// from block metadata (m_AllAllocations mode), precomputes per-block movable
// flags, sorts allocations and blocks into processing order, then runs up to
// `roundCount` DefragmentRound passes while the result stays VK_SUCCESS.
14554 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
14555 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14556 VkDeviceSize maxBytesToMove,
14557 uint32_t maxAllocationsToMove,
// Nothing registered and not in all-allocations mode: nothing to do.
14560 if(!m_AllAllocations && m_AllocationCount == 0)
14565 const size_t blockCount = m_Blocks.size();
14566 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14568 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
14570 if(m_AllAllocations)
// In all-allocations mode, pull every non-free suballocation from metadata.
14572 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
14573 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
14574 it != pMetadata->m_Suballocations.end();
14577 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
14579 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
14580 pBlockInfo->m_Allocations.push_back(allocInfo);
14585 pBlockInfo->CalcHasNonMovableAllocations();
// Processing sources from high offsets downwards: sort descending by offset.
14589 pBlockInfo->SortAllocationsByOffsetDescending();
14595 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
14598 const uint32_t roundCount = 2;
14601 VkResult result = VK_SUCCESS;
14602 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
// Heuristic: a move is worthwhile only towards an earlier block, or within the
// same block towards a lower offset. (Return statements elided by extraction;
// the comparison order shown is: earlier block -> yes, later block -> no,
// same block -> compare offsets.)
14610 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
14611 size_t dstBlockIndex, VkDeviceSize dstOffset,
14612 size_t srcBlockIndex, VkDeviceSize srcOffset)
14614 if(dstBlockIndex < srcBlockIndex)
14618 if(dstBlockIndex > srcBlockIndex)
14622 if(dstOffset < srcOffset)
// Constructor of the fast (compacting) algorithm. The algorithm relies on
// VMA_DEBUG_MARGIN being 0 (asserted) because it packs suballocations tightly.
14632 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
14634 VmaBlockVector* pBlockVector,
14635 uint32_t currentFrameIndex,
14636 bool overlappingMoveSupported) :
14637 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
14638 m_OverlappingMoveSupported(overlappingMoveSupported),
14639 m_AllocationCount(0),
14640 m_AllAllocations(false),
14642 m_AllocationsMoved(0),
14643 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
14645 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
14649 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast compaction: after PreprocessMetadata strips free suballocations, blocks
// are ordered by ascending free size and every allocation is slid to the
// lowest possible destination (earlier block, registered free gap, or lower
// offset in the same block). Each relocation is appended to `moves`;
// PostprocessMetadata then rebuilds free-list metadata.
// NOTE(review): braces, `break`s and several statements were elided by
// extraction; comments below annotate only the surviving lines.
14653 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
14654 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
14655 VkDeviceSize maxBytesToMove,
14656 uint32_t maxAllocationsToMove,
14659 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
14661 const size_t blockCount = m_pBlockVector->GetBlockCount();
14662 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
14667 PreprocessMetadata();
// Sort block indices so that blocks with less free space come first.
14671 m_BlockInfos.resize(blockCount);
14672 for(
size_t i = 0; i < blockCount; ++i)
14674 m_BlockInfos[i].origBlockIndex = i;
14677 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
14678 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
14679 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// freeSpaceDb remembers gaps skipped over, so later small allocations can fill them.
14684 FreeSpaceDatabase freeSpaceDb;
14686 size_t dstBlockInfoIndex = 0;
14687 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14688 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14689 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14690 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
14691 VkDeviceSize dstOffset = 0;
14694 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
14696 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
14697 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
14698 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
14699 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
14700 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
14702 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
14703 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
14704 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Stop when either budget would be exceeded by this allocation.
14705 if(m_AllocationsMoved == maxAllocationsToMove ||
14706 m_BytesMoved + srcAllocSize > maxBytesToMove)
14711 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
14713 VmaDefragmentationMove move = {};
// Case 1: a previously registered free gap can hold this allocation.
14715 size_t freeSpaceInfoIndex;
14716 VkDeviceSize dstAllocOffset;
14717 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
14718 freeSpaceInfoIndex, dstAllocOffset))
14720 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
14721 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
14722 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// 1a: gap is in the same block — move within the block (offset change only).
14725 if(freeSpaceInfoIndex == srcBlockInfoIndex)
14727 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14731 VmaSuballocation suballoc = *srcSuballocIt;
14732 suballoc.offset = dstAllocOffset;
14733 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
14734 m_BytesMoved += srcAllocSize;
14735 ++m_AllocationsMoved;
// Re-insert the suballocation at its new position in the list.
14737 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14739 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14740 srcSuballocIt = nextSuballocIt;
14742 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14744 move.srcBlockIndex = srcOrigBlockIndex;
14745 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14746 move.srcOffset = srcAllocOffset;
14747 move.dstOffset = dstAllocOffset;
14748 move.size = srcAllocSize;
14750 moves.push_back(move);
// 1b: gap is in an earlier block — rebind allocation to that block.
14757 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
14759 VmaSuballocation suballoc = *srcSuballocIt;
14760 suballoc.offset = dstAllocOffset;
14761 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
14762 m_BytesMoved += srcAllocSize;
14763 ++m_AllocationsMoved;
14765 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14767 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14768 srcSuballocIt = nextSuballocIt;
14770 InsertSuballoc(pFreeSpaceMetadata, suballoc);
14772 move.srcBlockIndex = srcOrigBlockIndex;
14773 move.dstBlockIndex = freeSpaceOrigBlockIndex;
14774 move.srcOffset = srcAllocOffset;
14775 move.dstOffset = dstAllocOffset;
14776 move.size = srcAllocSize;
14778 moves.push_back(move);
// Case 2: no usable gap — append at the current compaction cursor.
14783 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance the destination block while the allocation does not fit; the
// remaining tail of each skipped block is registered as reusable free space.
14786 while(dstBlockInfoIndex < srcBlockInfoIndex &&
14787 dstAllocOffset + srcAllocSize > dstBlockSize)
14790 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
14792 ++dstBlockInfoIndex;
14793 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
14794 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
14795 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
14796 dstBlockSize = pDstMetadata->GetSize();
14798 dstAllocOffset = 0;
// 2a: destination is the same block as the source.
14802 if(dstBlockInfoIndex == srcBlockInfoIndex)
14804 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
14806 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
14808 bool skipOver = overlap;
14809 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: skip when the gain (<1/64 of the size) isn't worth an overlapping copy.
14813 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
14818 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
14820 dstOffset = srcAllocOffset + srcAllocSize;
// Same-block move: just change the recorded offset.
14826 srcSuballocIt->offset = dstAllocOffset;
14827 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
14828 dstOffset = dstAllocOffset + srcAllocSize;
14829 m_BytesMoved += srcAllocSize;
14830 ++m_AllocationsMoved;
14833 move.srcBlockIndex = srcOrigBlockIndex;
14834 move.dstBlockIndex = dstOrigBlockIndex;
14835 move.srcOffset = srcAllocOffset;
14836 move.dstOffset = dstAllocOffset;
14837 move.size = srcAllocSize;
14839 moves.push_back(move);
// 2b: destination is an earlier block — rebind and append to its suballocation list.
14847 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
14848 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
14850 VmaSuballocation suballoc = *srcSuballocIt;
14851 suballoc.offset = dstAllocOffset;
14852 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
14853 dstOffset = dstAllocOffset + srcAllocSize;
14854 m_BytesMoved += srcAllocSize;
14855 ++m_AllocationsMoved;
14857 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14859 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14860 srcSuballocIt = nextSuballocIt;
14862 pDstMetadata->m_Suballocations.push_back(suballoc);
14864 move.srcBlockIndex = srcOrigBlockIndex;
14865 move.dstBlockIndex = dstOrigBlockIndex;
14866 move.srcOffset = srcAllocOffset;
14867 move.dstOffset = dstAllocOffset;
14868 move.size = srcAllocSize;
14870 moves.push_back(move);
14876 m_BlockInfos.clear();
// Rebuild free lists / aggregate counters invalidated by the raw list surgery above.
14878 PostprocessMetadata();
// Strips all FREE suballocations from every block's metadata and resets the
// free counters/size-sorted list, leaving only real allocations. Defragment()
// then treats the lists as pure allocation sequences; PostprocessMetadata
// restores consistent free-space bookkeeping afterwards.
14883 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
14885 const size_t blockCount = m_pBlockVector->GetBlockCount();
14886 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14888 VmaBlockMetadata_Generic*
const pMetadata =
14889 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14890 pMetadata->m_FreeCount = 0;
// Temporarily pretend the whole block is free; fixed up in postprocess.
14891 pMetadata->m_SumFreeSize = pMetadata->GetSize();
14892 pMetadata->m_FreeSuballocationsBySize.clear();
14893 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14894 it != pMetadata->m_Suballocations.end(); )
14896 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
14898 VmaSuballocationList::iterator nextIt = it;
14900 pMetadata->m_Suballocations.erase(it);
// Rebuilds free-space bookkeeping after compaction: re-inserts FREE
// suballocations into every gap between (and after) the remaining
// allocations, updates m_FreeCount / m_SumFreeSize, repopulates and re-sorts
// m_FreeSuballocationsBySize, then validates each block.
14911 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
14913 const size_t blockCount = m_pBlockVector->GetBlockCount();
14914 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14916 VmaBlockMetadata_Generic*
const pMetadata =
14917 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14918 const VkDeviceSize blockSize = pMetadata->GetSize();
// Fully empty block: one FREE suballocation covering the whole block.
14921 if(pMetadata->m_Suballocations.empty())
14923 pMetadata->m_FreeCount = 1;
14925 VmaSuballocation suballoc = {
14929 VMA_SUBALLOCATION_TYPE_FREE };
14930 pMetadata->m_Suballocations.push_back(suballoc);
14931 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Non-empty block: walk allocations in offset order, inserting FREE entries
// into every hole discovered between `offset` and the next allocation.
14936 VkDeviceSize offset = 0;
14937 VmaSuballocationList::iterator it;
14938 for(it = pMetadata->m_Suballocations.begin();
14939 it != pMetadata->m_Suballocations.end();
14942 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
14943 VMA_ASSERT(it->offset >= offset);
14946 if(it->offset > offset)
14948 ++pMetadata->m_FreeCount;
14949 const VkDeviceSize freeSize = it->offset - offset;
14950 VmaSuballocation suballoc = {
14954 VMA_SUBALLOCATION_TYPE_FREE };
14955 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// Only gaps above the registration threshold enter the by-size list.
14956 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14958 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
14962 pMetadata->m_SumFreeSize -= it->size;
14963 offset = it->offset + it->size;
// Trailing gap between the last allocation and the end of the block.
// NOTE(review): this branch uses `>` against the threshold while the loop
// above uses `>=` — looks inconsistent in the original; confirm upstream.
14967 if(offset < blockSize)
14969 ++pMetadata->m_FreeCount;
14970 const VkDeviceSize freeSize = blockSize - offset;
14971 VmaSuballocation suballoc = {
14975 VMA_SUBALLOCATION_TYPE_FREE };
14976 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
14977 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
14978 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14980 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
14985 pMetadata->m_FreeSuballocationsBySize.begin(),
14986 pMetadata->m_FreeSuballocationsBySize.end(),
14987 VmaSuballocationItemSizeLess());
14990 VMA_HEAVY_ASSERT(pMetadata->Validate());
// Inserts `suballoc` into the metadata's suballocation list keeping the list
// sorted by offset (linear scan for the first entry not below the new offset).
// NOTE(review): the iterator-advance line of the scan was elided by extraction.
14994 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
14997 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14998 while(it != pMetadata->m_Suballocations.end())
15000 if(it->offset < suballoc.offset)
15005 pMetadata->m_Suballocations.insert(it, suballoc);
// Constructor: wires the context to one block vector (default pool or custom
// pool) and initializes all bookkeeping; the concrete algorithm object is
// created later in Begin().
15011 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
15014 VmaBlockVector* pBlockVector,
15015 uint32_t currFrameIndex) :
15017 mutexLocked(false),
15018 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
15019 defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
15020 defragmentationMovesProcessed(0),
15021 defragmentationMovesCommitted(0),
15022 hasDefragmentationPlan(0),
15023 m_hAllocator(hAllocator),
15024 m_hCustomPool(hCustomPool),
15025 m_pBlockVector(pBlockVector),
15026 m_CurrFrameIndex(currFrameIndex),
15027 m_pAlgorithm(VMA_NULL),
15028 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
15029 m_AllAllocations(false)
// Destructor: releases the algorithm object created in Begin() (vma_delete on
// VMA_NULL is expected to be a safe no-op, mirroring delete semantics).
15033 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
15035 vma_delete(m_hAllocator, m_pAlgorithm);
// Queues one allocation (plus its optional "changed" output flag) to be
// forwarded to the algorithm when Begin() runs.
15038 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
15040 AllocInfo info = { hAlloc, pChanged };
15041 m_Allocations.push_back(info);
// Chooses and instantiates the defragmentation algorithm: the Fast variant is
// used only when it is safe (no debug margin, no possible bufferImageGranularity
// conflict, and — per the visible condition — all allocations participate);
// otherwise the Generic variant. Then feeds it either everything (AddAll) or
// the explicitly queued allocations.
15044 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported,
VmaDefragmentationFlags flags)
15046 const bool allAllocations = m_AllAllocations ||
15047 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
15060 if(VMA_DEBUG_MARGIN == 0 &&
15062 !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
15065 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
15066 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
15070 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
15071 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
15076 m_pAlgorithm->AddAll();
15080 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
15082 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context constructor: zeroes the per-memory-type
// default pool context table; custom pool contexts are added lazily.
15090 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
15092 uint32_t currFrameIndex,
15095 m_hAllocator(hAllocator),
15096 m_CurrFrameIndex(currFrameIndex),
15099 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
15101 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Destructor: ends defragmentation on every custom-pool context and every
// non-null default-pool context, then deletes them.
15104 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
15106 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15108 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
15109 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
15110 vma_delete(m_hAllocator, pBlockVectorCtx);
15112 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
15114 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
// Default-pool slots may be null (never touched by AddAllocations).
15115 if(pBlockVectorCtx)
15117 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
15118 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation. Pools with a non-default
// algorithm are skipped (only algorithm 0 is supported). A per-pool context is
// reused when one already exists, otherwise created and stored; AddAll marks
// every allocation in the pool as movable.
15123 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
const VmaPool* pPools)
15125 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15127 VmaPool pool = pPools[poolIndex];
15130 if(pool->m_BlockVector.GetAlgorithm() == 0)
15132 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Linear search — custom pool count is expected to be small.
15134 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15136 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
15138 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
15143 if(!pBlockVectorDefragCtx)
15145 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15148 &pool->m_BlockVector,
15150 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
15153 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations. Only block-type, non-lost allocations are
// considered. Each allocation is routed to the context of its owning custom
// pool (created on demand, algorithm 0 pools only) or of its memory type's
// default pool; pAllocationsChanged[i], when provided, receives the per-
// allocation "was moved" flag.
15158 void VmaDefragmentationContext_T::AddAllocations(
15159 uint32_t allocationCount,
15161 VkBool32* pAllocationsChanged)
15164 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
15167 VMA_ASSERT(hAlloc);
15169 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
15171 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
15173 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
15175 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation belongs to a custom pool.
15177 if(hAllocPool != VK_NULL_HANDLE)
15180 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
15182 for(
size_t i = m_CustomPoolContexts.size(); i--; )
15184 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
15186 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
15190 if(!pBlockVectorDefragCtx)
15192 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15195 &hAllocPool->m_BlockVector,
15197 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to a default pool: index contexts by memory type.
15204 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
15205 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
15206 if(!pBlockVectorDefragCtx)
15208 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
15211 m_hAllocator->m_pBlockVectors[memTypeIndex],
15213 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
15217 if(pBlockVectorDefragCtx)
15219 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
15220 &pAllocationsChanged[allocIndex] : VMA_NULL;
15221 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Drives defragmentation across all registered per-pool contexts. The visible
// code has two phases (the branch selecting between them was elided by
// extraction): a single-shot path that calls Defragment on every default-pool
// and custom-pool context with the CPU/GPU budgets, and an incremental path
// that builds a plan per context on first touch, then drains moves into the
// caller's pCurrentMove array bounded by movesLeft.
15227 VkResult VmaDefragmentationContext_T::Defragment(
15228 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
15229 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// Budgets are stored for later incremental passes.
15241 m_MaxCpuBytesToMove = maxCpuBytesToMove;
15242 m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
15244 m_MaxGpuBytesToMove = maxGpuBytesToMove;
15245 m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;
// All budgets zero: nothing can ever be moved.
15247 if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
15248 m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
15251 return VK_NOT_READY;
// Without a command buffer, GPU-side copies are impossible — zero GPU budget.
15254 if(commandBuffer == VK_NULL_HANDLE)
15256 maxGpuBytesToMove = 0;
15257 maxGpuAllocationsToMove = 0;
15260 VkResult res = VK_SUCCESS;
// Phase A: single-shot defragmentation of default pools...
15263 for(uint32_t memTypeIndex = 0;
15264 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
15267 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15268 if(pBlockVectorCtx)
15270 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15271 pBlockVectorCtx->GetBlockVector()->Defragment(
15274 maxCpuBytesToMove, maxCpuAllocationsToMove,
15275 maxGpuBytesToMove, maxGpuAllocationsToMove,
15277 if(pBlockVectorCtx->res != VK_SUCCESS)
15279 res = pBlockVectorCtx->res;
// ...then of custom pools.
15285 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15286 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
15289 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15290 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15291 pBlockVectorCtx->GetBlockVector()->Defragment(
15294 maxCpuBytesToMove, maxCpuAllocationsToMove,
15295 maxGpuBytesToMove, maxGpuAllocationsToMove,
15297 if(pBlockVectorCtx->res != VK_SUCCESS)
15299 res = pBlockVectorCtx->res;
// Phase B (incremental): ensure each default-pool context has a plan, then
// hand out moves via ProcessDefragmentations into pCurrentMove/movesLeft.
15312 for(uint32_t memTypeIndex = 0;
15313 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15316 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15317 if(pBlockVectorCtx)
15319 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15321 if(!pBlockVectorCtx->hasDefragmentationPlan)
15323 pBlockVectorCtx->GetBlockVector()->Defragment(
15326 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15327 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
15330 if(pBlockVectorCtx->res < VK_SUCCESS)
15333 pBlockVectorCtx->hasDefragmentationPlan =
true;
15336 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15338 pCurrentMove, movesLeft);
15340 movesLeft -= processed;
15341 pCurrentMove += processed;
// Same incremental handling for custom-pool contexts.
15346 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15347 customCtxIndex < customCtxCount;
15350 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15351 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15353 if(!pBlockVectorCtx->hasDefragmentationPlan)
15355 pBlockVectorCtx->GetBlockVector()->Defragment(
15358 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
15359 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
15362 if(pBlockVectorCtx->res < VK_SUCCESS)
15365 pBlockVectorCtx->hasDefragmentationPlan =
true;
15368 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
15370 pCurrentMove, movesLeft);
15372 movesLeft -= processed;
15373 pCurrentMove += processed;
// Ends one incremental pass: commits processed moves on every context that
// has a plan. Returns VK_NOT_READY when any context still lacks a plan or has
// uncommitted moves remaining; VK_SUCCESS otherwise.
15380 VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
15382 VkResult res = VK_SUCCESS;
// Default-pool contexts first...
15385 for(uint32_t memTypeIndex = 0;
15386 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
15389 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
15390 if(pBlockVectorCtx)
15392 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
15394 if(!pBlockVectorCtx->hasDefragmentationPlan)
15396 res = VK_NOT_READY;
15400 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15401 pBlockVectorCtx, m_pStats);
15403 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15404 res = VK_NOT_READY;
// ...then custom-pool contexts.
15409 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
15410 customCtxIndex < customCtxCount;
15413 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
15414 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
15416 if(!pBlockVectorCtx->hasDefragmentationPlan)
15418 res = VK_NOT_READY;
15422 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
15423 pBlockVectorCtx, m_pStats);
15425 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
15426 res = VK_NOT_READY;
15435 #if VMA_RECORDING_ENABLED
// VmaRecorder constructor plus the body of what appears to be
// VmaRecorder::Init (its signature line was elided by extraction): opens the
// recording file — fopen_s on Windows, fopen elsewhere — returning
// VK_ERROR_INITIALIZATION_FAILED on failure, and writes the CSV header with
// format version "1,8".
15437 VmaRecorder::VmaRecorder() :
15441 m_RecordingStartTime(std::chrono::high_resolution_clock::now())
15447 m_UseMutex = useMutex;
15448 m_Flags = settings.
flags;
// Binary mode ("wb") keeps line endings consistent across platforms.
15450 #if defined(_WIN32)
15452 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
15456 return VK_ERROR_INITIALIZATION_FAILED;
15460 m_File = fopen(settings.
pFilePath,
"wb");
15464 return VK_ERROR_INITIALIZATION_FAILED;
15469 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
15470 fprintf(m_File,
"%s\n",
"1,8");
// Destructor: closes the recording file if it was opened (fclose call elided
// by extraction; only the null check survives).
15475 VmaRecorder::~VmaRecorder()
15477 if(m_File != VMA_NULL)
// Records a vmaCreateAllocator call as one CSV line: threadId,time,frame,name.
15483 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
15485 CallParams callParams;
15486 GetBasicParams(callParams);
15488 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15489 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Records a vmaDestroyAllocator call (same CSV shape as RecordCreateAllocator).
15493 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
15495 CallParams callParams;
15496 GetBasicParams(callParams);
15498 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15499 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Body of what appears to be VmaRecorder::RecordCreatePool (signature line
// elided by extraction): logs pool-create parameters and the resulting pool
// handle as one CSV line.
15505 CallParams callParams;
15506 GetBasicParams(callParams);
15508 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15509 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaDestroyPool call with the pool handle (%p argument line elided).
15520 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
15522 CallParams callParams;
15523 GetBasicParams(callParams);
15525 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15526 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaAllocateMemory call: memory requirements, create-info fields,
// resulting allocation handle and the (escaped) user-data string.
15531 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
15532 const VkMemoryRequirements& vkMemReq,
15536 CallParams callParams;
15537 GetBasicParams(callParams);
15539 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// UserDataString formats pUserData according to the create-info flags.
15540 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15541 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15543 vkMemReq.alignment,
15544 vkMemReq.memoryTypeBits,
15552 userDataStr.GetString());
// Records a vmaAllocateMemoryPages call; the variable-length allocation handle
// list is emitted via PrintPointerList between the fixed prefix and the
// trailing user-data string.
15556 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
15557 const VkMemoryRequirements& vkMemReq,
15559 uint64_t allocationCount,
15562 CallParams callParams;
15563 GetBasicParams(callParams);
15565 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15566 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15567 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
15569 vkMemReq.alignment,
15570 vkMemReq.memoryTypeBits,
15577 PrintPointerList(allocationCount, pAllocations);
15578 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Records a vmaAllocateMemoryForBuffer call; includes the dedicated-allocation
// hints (required/preferred) as 0/1 fields.
15582 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
15583 const VkMemoryRequirements& vkMemReq,
15584 bool requiresDedicatedAllocation,
15585 bool prefersDedicatedAllocation,
15589 CallParams callParams;
15590 GetBasicParams(callParams);
15592 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15593 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15594 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15596 vkMemReq.alignment,
15597 vkMemReq.memoryTypeBits,
15598 requiresDedicatedAllocation ? 1 : 0,
15599 prefersDedicatedAllocation ? 1 : 0,
15607 userDataStr.GetString());
15611 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
15612 const VkMemoryRequirements& vkMemReq,
15613 bool requiresDedicatedAllocation,
15614 bool prefersDedicatedAllocation,
15618 CallParams callParams;
15619 GetBasicParams(callParams);
15621 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15622 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
15623 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15625 vkMemReq.alignment,
15626 vkMemReq.memoryTypeBits,
15627 requiresDedicatedAllocation ? 1 : 0,
15628 prefersDedicatedAllocation ? 1 : 0,
15636 userDataStr.GetString());
// NOTE(review): extraction-damaged region (fused line numbers, dropped lines).
// Code left byte-identical; comments only. Each function below logs one CSV
// row into m_File under m_FileMutex, stamped with thread id, time and frame.

// Records vmaFreeMemory (single allocation pointer).
15640 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
15643 CallParams callParams;
15644 GetBasicParams(callParams);
15646 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15647 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records vmaFreeMemoryPages; allocations emitted via PrintPointerList.
15652 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
15653 uint64_t allocationCount,
15656 CallParams callParams;
15657 GetBasicParams(callParams);
15659 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15660 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
15661 PrintPointerList(allocationCount, pAllocations);
15662 fprintf(m_File,
"\n");
// Records vmaSetAllocationUserData (allocation ptr + stringified user data).
15666 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
15668 const void* pUserData)
15670 CallParams callParams;
15671 GetBasicParams(callParams);
15673 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15674 UserDataString userDataStr(
15677 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15679 userDataStr.GetString());
// Records vmaCreateLostAllocation.
15683 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
15686 CallParams callParams;
15687 GetBasicParams(callParams);
15689 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15690 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records vmaMapMemory.
15695 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
15698 CallParams callParams;
15699 GetBasicParams(callParams);
15701 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15702 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records vmaUnmapMemory.
15707 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
15710 CallParams callParams;
15711 GetBasicParams(callParams);
15713 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15714 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records vmaFlushAllocation (allocation ptr, offset, size).
15719 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
15720 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15722 CallParams callParams;
15723 GetBasicParams(callParams);
15725 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15726 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Records vmaInvalidateAllocation (allocation ptr, offset, size).
15733 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
15734 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
15736 CallParams callParams;
15737 GetBasicParams(callParams);
15739 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15740 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// NOTE(review): extraction-damaged region (fused line numbers, dropped lines).
// Code left byte-identical; comments only.

// Records vmaCreateBuffer: VkBufferCreateInfo fields, VmaAllocationCreateInfo
// fields, pool handle, resulting pointers and user-data string — one CSV row.
15747 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
15748 const VkBufferCreateInfo& bufCreateInfo,
15752 CallParams callParams;
15753 GetBasicParams(callParams);
15755 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15756 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15757 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15758 bufCreateInfo.flags,
15759 bufCreateInfo.size,
15760 bufCreateInfo.usage,
15761 bufCreateInfo.sharingMode,
15762 allocCreateInfo.
flags,
15763 allocCreateInfo.
usage,
15767 allocCreateInfo.
pool,
15769 userDataStr.GetString());
// Records vmaCreateImage: full VkImageCreateInfo (type, format, extent, mips,
// layers, samples, tiling, usage, sharing, layout) plus alloc-create-info.
15773 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
15774 const VkImageCreateInfo& imageCreateInfo,
15778 CallParams callParams;
15779 GetBasicParams(callParams);
15781 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15782 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
15783 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15784 imageCreateInfo.flags,
15785 imageCreateInfo.imageType,
15786 imageCreateInfo.format,
15787 imageCreateInfo.extent.width,
15788 imageCreateInfo.extent.height,
15789 imageCreateInfo.extent.depth,
15790 imageCreateInfo.mipLevels,
15791 imageCreateInfo.arrayLayers,
15792 imageCreateInfo.samples,
15793 imageCreateInfo.tiling,
15794 imageCreateInfo.usage,
15795 imageCreateInfo.sharingMode,
15796 imageCreateInfo.initialLayout,
15797 allocCreateInfo.
flags,
15798 allocCreateInfo.
usage,
15802 allocCreateInfo.
pool,
15804 userDataStr.GetString());
// NOTE(review): extraction-damaged region (fused line numbers, dropped lines).
// Code left byte-identical; comments only.

// Records vmaDestroyBuffer (allocation pointer).
15808 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
15811 CallParams callParams;
15812 GetBasicParams(callParams);
15814 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15815 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records vmaDestroyImage.
15820 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
15823 CallParams callParams;
15824 GetBasicParams(callParams);
15826 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15827 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records vmaTouchAllocation.
15832 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
15835 CallParams callParams;
15836 GetBasicParams(callParams);
15838 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15839 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records vmaGetAllocationInfo.
15844 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
15847 CallParams callParams;
15848 GetBasicParams(callParams);
15850 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15851 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records vmaMakePoolAllocationsLost (pool pointer).
15856 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
15859 CallParams callParams;
15860 GetBasicParams(callParams);
15862 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15863 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records vmaDefragmentationBegin: info fields plus two pointer lists
// (emitted between the partial fprintf calls below).
15868 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
15872 CallParams callParams;
15873 GetBasicParams(callParams);
15875 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15876 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
15879 fprintf(m_File,
",");
15881 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Records vmaDefragmentationEnd (context pointer).
15891 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
15894 CallParams callParams;
15895 GetBasicParams(callParams);
15897 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15898 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records vmaSetPoolName; a null name is written as an empty string.
15903 void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
15907 CallParams callParams;
15908 GetBasicParams(callParams);
15910 VmaMutexLock lock(m_FileMutex, m_UseMutex);
15911 fprintf(m_File,
"%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
15912 pool, name != VMA_NULL ? name :
"");
// Fragment of VmaRecorder::UserDataString's constructor (signature lost in
// extraction): keeps pUserData as a string when it is one, otherwise formats
// the pointer value into the fixed 17-byte m_PtrStr buffer ("%p" + NUL).
15918 if(pUserData != VMA_NULL)
15922 m_Str = (
const char*)pUserData;
15927 snprintf(m_PtrStr, 17,
"%p", pUserData);
15937 void VmaRecorder::WriteConfiguration(
15938 const VkPhysicalDeviceProperties& devProps,
15939 const VkPhysicalDeviceMemoryProperties& memProps,
15940 uint32_t vulkanApiVersion,
15941 bool dedicatedAllocationExtensionEnabled,
15942 bool bindMemory2ExtensionEnabled,
15943 bool memoryBudgetExtensionEnabled,
15944 bool deviceCoherentMemoryExtensionEnabled)
15946 fprintf(m_File,
"Config,Begin\n");
15948 fprintf(m_File,
"VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));
15950 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
15951 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
15952 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
15953 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
15954 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
15955 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
15957 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
15958 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
15959 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
15961 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
15962 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
15964 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
15965 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
15967 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
15968 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
15970 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
15971 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
15974 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
15975 fprintf(m_File,
"Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
15976 fprintf(m_File,
"Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
15977 fprintf(m_File,
"Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);
15979 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
15980 fprintf(m_File,
"Macro,VMA_MIN_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_MIN_ALIGNMENT);
15981 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
15982 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
15983 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
15984 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
15985 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
15986 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
15987 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
15989 fprintf(m_File,
"Config,End\n");
15992 void VmaRecorder::GetBasicParams(CallParams& outParams)
15994 #if defined(_WIN32)
15995 outParams.threadId = GetCurrentThreadId();
16000 std::thread::id thread_id = std::this_thread::get_id();
16001 std::stringstream thread_id_to_string_converter;
16002 thread_id_to_string_converter << thread_id;
16003 std::string thread_id_as_string = thread_id_to_string_converter.str();
16004 outParams.threadId =
static_cast<uint32_t
>(std::stoi(thread_id_as_string.c_str()));
16007 auto current_time = std::chrono::high_resolution_clock::now();
16009 outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
16012 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
16016 fprintf(m_File,
"%p", pItems[0]);
16017 for(uint64_t i = 1; i < count; ++i)
16019 fprintf(m_File,
" %p", pItems[i]);
16024 void VmaRecorder::Flush()
16037 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
16038 m_Allocator(pAllocationCallbacks, 1024)
16042 template<
typename... Types>
VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
16044 VmaMutexLock mutexLock(m_Mutex);
16045 return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
16048 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
16050 VmaMutexLock mutexLock(m_Mutex);
16051 m_Allocator.Free(hAlloc);
// NOTE(review): extraction-damaged fragment of the VmaAllocator_T constructor —
// the signature and many member-initializer/condition lines were dropped and
// original line numbers are fused into the text. Code left byte-identical.

// Member-initializer list (partial): API version defaults to 1.0 when the
// caller passed 0; allocation callbacks default to the empty set.
16059 m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
16066 m_hDevice(pCreateInfo->device),
16067 m_hInstance(pCreateInfo->instance),
16068 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
16069 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
16070 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
16071 m_AllocationObjectAllocator(&m_AllocationCallbacks),
16072 m_HeapSizeLimitMask(0),
16073 m_DeviceMemoryCount(0),
16074 m_PreferredLargeHeapBlockSize(0),
16075 m_PhysicalDevice(pCreateInfo->physicalDevice),
16076 m_CurrentFrameIndex(0),
16077 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
16079 m_GlobalMemoryTypeBits(UINT32_MAX)
16081 ,m_pRecorder(VMA_NULL)
// On Vulkan >= 1.1 the KHR extensions are promoted to core, so the
// extension-specific flags are cleared.
16084 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16086 m_UseKhrDedicatedAllocation =
false;
16087 m_UseKhrBindMemory2 =
false;
16090 if(VMA_DEBUG_DETECT_CORRUPTION)
// Corruption detection writes uint32_t markers, so the margin must be a
// multiple of 4 bytes.
16093 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
16098 if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
// Assert that requested features were not compiled out by preprocessor macros.
16100 #if !(VMA_DEDICATED_ALLOCATION)
16103 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
16106 #if !(VMA_BIND_MEMORY2)
16109 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
16113 #if !(VMA_MEMORY_BUDGET)
16116 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
16119 #if !(VMA_BUFFER_DEVICE_ADDRESS)
16120 if(m_UseKhrBufferDeviceAddress)
16122 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
16125 #if VMA_VULKAN_VERSION < 1002000
16126 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
16128 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
16131 #if VMA_VULKAN_VERSION < 1001000
16132 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16134 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
16137 #if !(VMA_MEMORY_PRIORITY)
16138 if(m_UseExtMemoryPriority)
16140 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
// Zero-initialize POD members and tables.
16144 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
16145 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
16146 memset(&m_MemProps, 0,
sizeof(m_MemProps));
16148 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
16149 memset(&m_VulkanFunctions, 0,
sizeof(m_VulkanFunctions));
16151 #if VMA_EXTERNAL_MEMORY
16152 memset(&m_TypeExternalMemoryHandleTypes, 0,
sizeof(m_TypeExternalMemoryHandleTypes));
// Query device properties/memory layout through the imported function pointers.
16164 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
16165 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Alignment-related quantities must be powers of two for the bit tricks used
// throughout the allocator.
16167 VMA_ASSERT(VmaIsPow2(VMA_MIN_ALIGNMENT));
16168 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
16169 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
16170 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
16175 m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
16177 #if VMA_EXTERNAL_MEMORY
16181 sizeof(VkExternalMemoryHandleTypeFlagsKHR) * GetMemoryTypeCount());
// Apply optional per-heap size limits: clamp the reported heap size and
// remember which heaps are limited in m_HeapSizeLimitMask.
16187 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
16189 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
16190 if(limit != VK_WHOLE_SIZE)
16192 m_HeapSizeLimitMask |= 1u << heapIndex;
16193 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
16195 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create one VmaBlockVector per memory type.
16201 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16203 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
16205 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
16209 preferredBlockSize,
16212 GetBufferImageGranularity(),
16217 GetMemoryTypeMinAlignment(memTypeIndex),
16226 VkResult res = VK_SUCCESS;
// Optional call recording: only available when compiled with
// VMA_RECORDING_ENABLED; otherwise requesting it is a hard error.
16231 #if VMA_RECORDING_ENABLED
16232 m_pRecorder = vma_new(
this, VmaRecorder)();
16234 if(res != VK_SUCCESS)
16238 m_pRecorder->WriteConfiguration(
16239 m_PhysicalDeviceProperties,
16241 m_VulkanApiVersion,
16242 m_UseKhrDedicatedAllocation,
16243 m_UseKhrBindMemory2,
16244 m_UseExtMemoryBudget,
16245 m_UseAmdDeviceCoherentMemory);
16246 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
16248 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
16249 return VK_ERROR_FEATURE_NOT_PRESENT;
16253 #if VMA_MEMORY_BUDGET
16254 if(m_UseExtMemoryBudget)
16256 UpdateVulkanBudget();
16263 VmaAllocator_T::~VmaAllocator_T()
16265 #if VMA_RECORDING_ENABLED
16266 if(m_pRecorder != VMA_NULL)
16268 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
16269 vma_delete(
this, m_pRecorder);
16273 VMA_ASSERT(m_Pools.IsEmpty());
16275 for(
size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
16277 if(!m_DedicatedAllocations[memTypeIndex].IsEmpty())
16279 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
16282 vma_delete(
this, m_pBlockVectors[memTypeIndex]);
16286 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
16288 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
16289 ImportVulkanFunctions_Static();
16292 if(pVulkanFunctions != VMA_NULL)
16294 ImportVulkanFunctions_Custom(pVulkanFunctions);
16297 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
16298 ImportVulkanFunctions_Dynamic();
16301 ValidateVulkanFunctions();
#if VMA_STATIC_VULKAN_FUNCTIONS == 1

// Fills m_VulkanFunctions from the statically linked Vulkan entry points.
// Vulkan 1.1 core functions are copied into the *KHR slots when the instance
// was created with API version >= 1.1.
// (Reconstructed from extraction-damaged text; all assignments were visible.)
void VmaAllocator_T::ImportVulkanFunctions_Static()
{
    // Vulkan 1.0 core.
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;

    // Vulkan 1.1 core (promoted KHR functions).
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
        m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
        m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
    }
#endif
}

#endif // VMA_STATIC_VULKAN_FUNCTIONS == 1
16342 void VmaAllocator_T::ImportVulkanFunctions_Custom(
const VmaVulkanFunctions* pVulkanFunctions)
16344 VMA_ASSERT(pVulkanFunctions != VMA_NULL);
16346 #define VMA_COPY_IF_NOT_NULL(funcName) \
16347 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
16349 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
16350 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
16351 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
16352 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
16353 VMA_COPY_IF_NOT_NULL(vkMapMemory);
16354 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
16355 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
16356 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
16357 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
16358 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
16359 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
16360 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
16361 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
16362 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
16363 VMA_COPY_IF_NOT_NULL(vkCreateImage);
16364 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
16365 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
16367 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16368 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
16369 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
16372 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16373 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
16374 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
16377 #if VMA_MEMORY_BUDGET
16378 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
16381 #undef VMA_COPY_IF_NOT_NULL
#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1

// Fetches any still-null function pointers at runtime: instance-level
// functions via vkGetInstanceProcAddr, device-level via vkGetDeviceProcAddr.
// On Vulkan >= 1.1 the promoted core names are used; otherwise the *KHR
// extension names are fetched when the corresponding extension is enabled.
// (Reconstructed from extraction-damaged text; all fetch lines were visible.)
void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
{
#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);

    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
    VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
    VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
    VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
    VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
    VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
    VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
    VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
    VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
    VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
    VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
    VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");

#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2");
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif

#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
    }
#endif

#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
    }
#endif

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif

#undef VMA_FETCH_DEVICE_FUNC
#undef VMA_FETCH_INSTANCE_FUNC
}

#endif // VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
16455 void VmaAllocator_T::ValidateVulkanFunctions()
16457 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
16458 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
16459 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
16460 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
16461 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
16462 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
16463 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
16464 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
16465 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
16466 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
16467 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
16468 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
16469 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
16470 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
16471 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
16472 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
16473 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
16475 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16476 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
16478 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
16479 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
16483 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
16484 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
16486 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
16487 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
16491 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
16492 if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16494 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
16499 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
16501 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16502 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
16503 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
16504 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
// NOTE(review): extraction-damaged fragment of AllocateMemoryOfType — several
// parameter and branch lines were dropped and original line numbers are fused
// into the text. Code left byte-identical; comments only.
// Visible logic: try a dedicated allocation when preferred (large size or
// explicit request) and no custom pool is used; otherwise allocate from the
// memory type's block vector, falling back to dedicated memory on failure.
16507 VkResult VmaAllocator_T::AllocateMemoryOfType(
16509 VkDeviceSize alignment,
16510 bool dedicatedAllocation,
16511 VkBuffer dedicatedBuffer,
16512 VkBufferUsageFlags dedicatedBufferUsage,
16513 VkImage dedicatedImage,
16515 uint32_t memTypeIndex,
16516 VmaSuballocationType suballocType,
16517 size_t allocationCount,
16520 VMA_ASSERT(pAllocations != VMA_NULL);
16521 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
16527 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16537 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
16538 VMA_ASSERT(blockVector);
16540 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Dedicated memory is preferred for allocations larger than half the
// preferred block size, or when explicitly requested / forced by debug macro.
16541 bool preferDedicatedMemory =
16542 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
16543 dedicatedAllocation ||
16545 size > preferredBlockSize / 2;
16547 if(preferDedicatedMemory &&
16549 finalCreateInfo.
pool == VK_NULL_HANDLE)
16558 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16562 return AllocateDedicatedMemory(
16572 dedicatedBufferUsage,
16580 VkResult res = blockVector->Allocate(
16581 m_CurrentFrameIndex.load(),
16588 if(res == VK_SUCCESS)
16596 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Safety margin: give up before reaching maxMemoryAllocationCount
// (3/4 of the device limit) rather than failing unpredictably later.
16602 if(m_DeviceMemoryCount.load() > m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount * 3 / 4)
16604 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16607 res = AllocateDedicatedMemory(
16617 dedicatedBufferUsage,
16621 if(res == VK_SUCCESS)
16624 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
16630 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// VmaAllocator_T::AllocateDedicatedMemory — allocates `allocationCount` separate
// VkDeviceMemory objects (one per allocation), chaining the appropriate pNext
// structures (dedicated info, buffer-device-address flags, priority, export info).
// On partial failure every already-created page is rolled back and freed.
// NOTE(review): lines dropped by the extract (parameters, braces, some conditions);
// confirm against upstream vk_mem_alloc.h.
16636 VkResult VmaAllocator_T::AllocateDedicatedMemory(
16638 VmaSuballocationType suballocType,
16639 uint32_t memTypeIndex,
16642 bool isUserDataString,
16645 VkBuffer dedicatedBuffer,
16646 VkBufferUsageFlags dedicatedBufferUsage,
16647 VkImage dedicatedImage,
16648 size_t allocationCount,
16651 VMA_ASSERT(allocationCount > 0 && pAllocations);
// Budget pre-check: refuse up front if the whole batch would exceed the heap budget.
16655 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
16657 GetBudget(&heapBudget, heapIndex, 1);
16658 if(heapBudget.
usage + size * allocationCount > heapBudget.
budget)
16660 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16664 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
16665 allocInfo.memoryTypeIndex = memTypeIndex;
16666 allocInfo.allocationSize = size;
// VK_KHR_dedicated_allocation / Vulkan 1.1: tie the memory to the buffer or image.
16668 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16669 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
16670 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16672 if(dedicatedBuffer != VK_NULL_HANDLE)
16674 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
16675 dedicatedAllocInfo.buffer = dedicatedBuffer;
16676 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
16678 else if(dedicatedImage != VK_NULL_HANDLE)
16680 dedicatedAllocInfo.image = dedicatedImage;
16681 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
// VK_KHR_buffer_device_address: set DEVICE_ADDRESS bit unless the memory is known
// to back an image or a buffer without the SHADER_DEVICE_ADDRESS usage.
16686 #if VMA_BUFFER_DEVICE_ADDRESS
16687 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
16688 if(m_UseKhrBufferDeviceAddress)
16690 bool canContainBufferWithDeviceAddress =
true;
16691 if(dedicatedBuffer != VK_NULL_HANDLE)
16693 canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX ||
16694 (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
16696 else if(dedicatedImage != VK_NULL_HANDLE)
16698 canContainBufferWithDeviceAddress =
false;
16700 if(canContainBufferWithDeviceAddress)
16702 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
16703 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
// VK_EXT_memory_priority: pass the requested priority through.
16708 #if VMA_MEMORY_PRIORITY
16709 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
16710 if(m_UseExtMemoryPriority)
16712 priorityInfo.priority = priority;
16713 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
// External memory export handle types configured per memory type.
16717 #if VMA_EXTERNAL_MEMORY
16719 VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR };
16720 exportMemoryAllocInfo.handleTypes = GetExternalMemoryHandleTypeFlags(memTypeIndex);
16721 if(exportMemoryAllocInfo.handleTypes != 0)
16723 VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo);
// Allocate one page per requested allocation; stop at the first failure.
16728 VkResult res = VK_SUCCESS;
16729 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16731 res = AllocateDedicatedMemoryPage(
16739 pAllocations + allocIndex);
16740 if(res != VK_SUCCESS)
16746 if(res == VK_SUCCESS)
// Success: register the new allocations in the per-type dedicated list.
16750 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
16751 DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
16752 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
16754 dedicatedAllocations.PushBack(pAllocations[allocIndex]);
16758 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: roll back the pages created so far (free memory, budget, object),
// then clear the output array.
16763 while(allocIndex--)
16766 VkDeviceMemory hMemory = currAlloc->GetMemory();
16778 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
16779 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
16780 currAlloc->SetUserData(
this, VMA_NULL);
16781 m_AllocationObjectAllocator.Free(currAlloc);
16784 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// VmaAllocator_T::AllocateDedicatedMemoryPage — allocates one VkDeviceMemory,
// optionally maps it (condition truncated in this extract), and wraps it in a
// VmaAllocation_T initialized as a dedicated allocation.
// NOTE(review): parameter lines and some braces missing from this extract.
16790 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
16792 VmaSuballocationType suballocType,
16793 uint32_t memTypeIndex,
16794 const VkMemoryAllocateInfo& allocInfo,
16796 bool isUserDataString,
16800 VkDeviceMemory hMemory = VK_NULL_HANDLE;
16801 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
16804 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistent mapping path: map now so the allocation exposes pMappedData.
16808 void* pMappedData = VMA_NULL;
16811 res = (*m_VulkanFunctions.vkMapMemory)(
16820 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Mapping failed — release the freshly allocated memory before returning.
16821 FreeVulkanMemory(memTypeIndex, size, hMemory);
16826 *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
16827 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
16828 (*pAllocation)->SetUserData(
this, pUserData);
16829 m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
16830 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
16832 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// VmaAllocator_T::GetBufferMemoryRequirements — queries memory requirements for a
// buffer, using vkGetBufferMemoryRequirements2KHR (with VkMemoryDedicatedRequirements
// chained) when Vulkan 1.1 or VK_KHR_dedicated_allocation is available, otherwise the
// plain Vulkan 1.0 call with both dedicated-allocation outputs forced to false.
// NOTE(review): the VkBuffer parameter line is missing from this extract.
16838 void VmaAllocator_T::GetBufferMemoryRequirements(
16840 VkMemoryRequirements& memReq,
16841 bool& requiresDedicatedAllocation,
16842 bool& prefersDedicatedAllocation)
const
16844 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16845 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16847 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
16848 memReqInfo.buffer = hBuffer;
16850 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16852 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16853 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16855 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16857 memReq = memReq2.memoryRequirements;
16858 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16859 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Vulkan 1.0 fallback — no dedicated-allocation information available.
16864 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
16865 requiresDedicatedAllocation =
false;
16866 prefersDedicatedAllocation =
false;
// VmaAllocator_T::GetImageMemoryRequirements — image counterpart of
// GetBufferMemoryRequirements: prefers vkGetImageMemoryRequirements2KHR with
// VkMemoryDedicatedRequirements chained; falls back to the Vulkan 1.0 query.
// NOTE(review): the VkImage parameter line is missing from this extract.
16870 void VmaAllocator_T::GetImageMemoryRequirements(
16872 VkMemoryRequirements& memReq,
16873 bool& requiresDedicatedAllocation,
16874 bool& prefersDedicatedAllocation)
const
16876 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
16877 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
16879 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
16880 memReqInfo.image = hImage;
16882 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
16884 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
16885 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
16887 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
16889 memReq = memReq2.memoryRequirements;
16890 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
16891 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Vulkan 1.0 fallback — no dedicated-allocation information available.
16896 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
16897 requiresDedicatedAllocation =
false;
16898 prefersDedicatedAllocation =
false;
// VmaAllocator_T::AllocateMemory — top-level allocation entry: validates flag
// combinations, routes pool allocations to the pool's block vector, otherwise
// iterates candidate memory types (clearing each failed type's bit) calling
// AllocateMemoryOfType until one succeeds.
// NOTE(review): flag-check conditions and several braces are missing from this
// extract; the asserts' messages indicate which combination each rejects.
16902 VkResult VmaAllocator_T::AllocateMemory(
16903 const VkMemoryRequirements& vkMemReq,
16904 bool requiresDedicatedAllocation,
16905 bool prefersDedicatedAllocation,
16906 VkBuffer dedicatedBuffer,
16907 VkBufferUsageFlags dedicatedBufferUsage,
16908 VkImage dedicatedImage,
16910 VmaSuballocationType suballocType,
16911 size_t allocationCount,
// Clear outputs first so callers never see stale handles on failure.
16914 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
16916 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
16918 if(vkMemReq.size == 0)
16920 return VK_ERROR_VALIDATION_FAILED_EXT;
16925 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
16926 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16931 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
16932 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16934 if(requiresDedicatedAllocation)
16938 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
16939 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16941 if(createInfo.
pool != VK_NULL_HANDLE)
16943 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
16944 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
16947 if((createInfo.
pool != VK_NULL_HANDLE) &&
16950 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
16951 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool path: delegate directly to the pool's block vector.
16954 if(createInfo.
pool != VK_NULL_HANDLE)
16959 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16964 return createInfo.
pool->m_BlockVector.Allocate(
16965 m_CurrentFrameIndex.load(),
16967 vkMemReq.alignment,
// Default pools: try memory types from vkMemReq.memoryTypeBits in preference
// order, removing each type that fails and retrying with the next best.
16976 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
16977 uint32_t memTypeIndex = UINT32_MAX;
16979 if(res == VK_SUCCESS)
16981 res = AllocateMemoryOfType(
16983 vkMemReq.alignment,
16984 requiresDedicatedAllocation || prefersDedicatedAllocation,
16986 dedicatedBufferUsage,
16994 if(res == VK_SUCCESS)
// Remove the failed memory type from the candidate set and retry.
17004 memoryTypeBits &= ~(1u << memTypeIndex);
17007 if(res == VK_SUCCESS)
17009 res = AllocateMemoryOfType(
17011 vkMemReq.alignment,
17012 requiresDedicatedAllocation || prefersDedicatedAllocation,
17014 dedicatedBufferUsage,
17022 if(res == VK_SUCCESS)
// No candidate memory type could satisfy the request.
17032 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// VmaAllocator_T::FreeMemory — frees an array of allocations in reverse order,
// dispatching each to its owning block vector (default or custom pool) or to
// FreeDedicatedMemory, then updating the budget and releasing the object.
17043 void VmaAllocator_T::FreeMemory(
17044 size_t allocationCount,
17047 VMA_ASSERT(pAllocations);
// Iterate backwards over the array.
17049 for(
size_t allocIndex = allocationCount; allocIndex--; )
17053 if(allocation != VK_NULL_HANDLE)
17055 if(TouchAllocation(allocation))
// Optionally overwrite freed memory with the "destroyed" debug pattern.
17057 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
17059 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
17062 switch(allocation->GetType())
17064 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17066 VmaBlockVector* pBlockVector = VMA_NULL;
17067 VmaPool hPool = allocation->GetBlock()->GetParentPool();
17068 if(hPool != VK_NULL_HANDLE)
// Allocation came from a custom pool.
17070 pBlockVector = &hPool->m_BlockVector;
// Otherwise it came from the default per-memory-type block vector.
17074 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17075 pBlockVector = m_pBlockVectors[memTypeIndex];
17077 pBlockVector->Free(allocation);
17080 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17081 FreeDedicatedMemory(allocation);
17089 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
17090 allocation->SetUserData(
this, VMA_NULL);
17091 m_AllocationObjectAllocator.Free(allocation);
// VmaAllocator_T::CalculateStats — aggregates statistics over default block
// vectors, custom pools, and dedicated allocations into pStats (total, per
// memory type, per heap), then post-processes averages.
17096 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Initialize all StatInfo accumulators.
17099 InitStatInfo(pStats->
total);
17100 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
17102 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default per-memory-type block vectors.
17106 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17108 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
17109 VMA_ASSERT(pBlockVector);
17110 pBlockVector->AddStats(pStats);
// Custom pools (guarded by the pools mutex).
17115 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
17116 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
17118 pool->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type (guarded by the per-type mutex).
17123 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17125 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
17126 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17127 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
17129 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
17132 alloc->DedicatedAllocCalcStatsInfo(allocationStatInfo);
17133 VmaAddStatInfo(pStats->
total, allocationStatInfo);
17134 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
17135 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/final fields from the raw accumulators.
17140 VmaPostprocessCalcStatInfo(pStats->
total);
17141 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
17142 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
17143 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
17144 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// VmaAllocator_T::GetBudget — fills `heapCount` VmaBudget entries starting at
// `firstHeap`. With VK_EXT_memory_budget enabled it serves cached driver numbers
// (refetching via UpdateVulkanBudget after 30+ operations); otherwise it
// estimates the budget as 80% of the heap size.
17147 void VmaAllocator_T::GetBudget(
VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
17149 #if VMA_MEMORY_BUDGET
17150 if(m_UseExtMemoryBudget)
// Cached driver data is considered fresh for up to 30 allocator operations.
17152 if(m_Budget.m_OperationsSinceBudgetFetch < 30)
17154 VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
17155 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
17157 const uint32_t heapIndex = firstHeap + i;
17159 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
// Adjust the driver-reported usage by block bytes allocated/freed since the
// last budget fetch, clamping at zero.
17162 if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->
blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
17164 outBudget->
usage = m_Budget.m_VulkanUsage[heapIndex] +
17165 outBudget->
blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
17169 outBudget->
usage = 0;
17173 outBudget->
budget = VMA_MIN(
17174 m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
// Cache stale: refresh from the driver and recurse once.
17179 UpdateVulkanBudget();
17180 GetBudget(outBudget, firstHeap, heapCount);
// No VK_EXT_memory_budget: estimate budget as 80% of the heap size.
17186 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
17188 const uint32_t heapIndex = firstHeap + i;
17190 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
17194 outBudget->
budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
// 4098 == 0x1002 (AMD's PCI vendor ID).
17199 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// VmaAllocator_T::DefragmentationBegin — creates a VmaDefragmentationContext_T,
// registers the allocations to move, and runs the (possibly incremental)
// defragmentation. The context is destroyed unless the result is VK_NOT_READY.
// NOTE(review): parameter lines and the Defragment() argument list are missing
// from this extract.
17201 VkResult VmaAllocator_T::DefragmentationBegin(
17211 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
17212 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
17215 (*pContext)->AddAllocations(
17218 VkResult res = (*pContext)->Defragment(
// VK_NOT_READY means an incremental pass is pending: keep the context alive.
17223 if(res != VK_NOT_READY)
17225 vma_delete(
this, *pContext);
17226 *pContext = VMA_NULL;
// VmaAllocator_T::DefragmentationEnd — destroys the defragmentation context.
17232 VkResult VmaAllocator_T::DefragmentationEnd(
17235 vma_delete(
this, context);
// VmaAllocator_T::DefragmentationPassBegin — forwards to the context's pass-begin.
17239 VkResult VmaAllocator_T::DefragmentationPassBegin(
17243 return context->DefragmentPassBegin(pInfo);
// VmaAllocator_T::DefragmentationPassEnd — forwards to the context's pass-end.
17245 VkResult VmaAllocator_T::DefragmentationPassEnd(
17248 return context->DefragmentPassEnd();
// NOTE(review): the function header is missing from this extract; by the behavior
// (fills pAllocationInfo fields, lost-allocation CAS loop) this is presumably
// VmaAllocator_T::GetAllocationInfo — confirm against upstream vk_mem_alloc.h.
// For allocations that can become lost, it retries a compare-exchange on the
// last-use frame index; lost allocations report deviceMemory/offset of zero.
17254 if(hAllocation->CanBecomeLost())
17260 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17261 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Allocation already lost: report empty memory but keep size/userdata.
17264 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
17268 pAllocationInfo->
offset = 0;
17269 pAllocationInfo->
size = hAllocation->GetSize();
17271 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report live parameters.
17274 else if(localLastUseFrameIndex == localCurrFrameIndex)
17276 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
17277 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
17278 pAllocationInfo->
offset = hAllocation->GetOffset();
17279 pAllocationInfo->
size = hAllocation->GetSize();
17281 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to bump last-use to the current frame and loop.
17286 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17288 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: with stats enabled, still record the touch via CAS.
17295 #if VMA_STATS_STRING_ENABLED
17296 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17297 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17300 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
17301 if(localLastUseFrameIndex == localCurrFrameIndex)
17307 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17309 localLastUseFrameIndex = localCurrFrameIndex;
17315 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
17316 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
17317 pAllocationInfo->
offset = hAllocation->GetOffset();
17318 pAllocationInfo->
size = hAllocation->GetSize();
17319 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
17320 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// VmaAllocator_T::TouchAllocation — marks the allocation as used in the current
// frame. For lost-capable allocations, CAS-updates the last-use frame index and
// (per truncated branches) reports whether the allocation is still valid.
17324 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
17327 if(hAllocation->CanBecomeLost())
17329 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17330 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost — return value in the truncated branch.
17333 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
17337 else if(localLastUseFrameIndex == localCurrFrameIndex)
17343 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17345 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: with stats enabled, still record the touch via CAS.
17352 #if VMA_STATS_STRING_ENABLED
17353 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
17354 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
17357 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
17358 if(localLastUseFrameIndex == localCurrFrameIndex)
17364 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
17366 localLastUseFrameIndex = localCurrFrameIndex;
// NOTE(review): the function header is missing from this extract; the debug log
// string and behavior identify this as VmaAllocator_T::CreatePool — confirm
// against upstream. Validates the create info, constructs a VmaPool_T, creates
// its minimum blocks, assigns an ID, and registers it in m_Pools.
17378 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
17394 return VK_ERROR_INITIALIZATION_FAILED;
// Memory type excluded by the global memory type mask (e.g. AMD device-coherent
// types when not enabled).
17398 ((1u << pCreateInfo->
memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
17400 return VK_ERROR_FEATURE_NOT_PRESENT;
17407 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
17409 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Pre-create the configured minimum number of blocks; destroy the pool on failure.
17411 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
17412 if(res != VK_SUCCESS)
17414 vma_delete(
this, *pPool);
17421 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
17422 (*pPool)->SetId(m_NextPoolId++);
17423 m_Pools.PushBack(*pPool);
// VmaAllocator_T::DestroyPool — unregisters the pool (under the pools mutex)
// and destroys it.
17429 void VmaAllocator_T::DestroyPool(
VmaPool pool)
17433 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
17434 m_Pools.Remove(pool);
17437 vma_delete(
this, pool);
// NOTE(review): header missing from this extract — presumably the body of
// VmaAllocator_T::GetPoolStats, which forwards to the pool's block vector.
17442 pool->m_BlockVector.GetPoolStats(pPoolStats);
// VmaAllocator_T::SetCurrentFrameIndex — stores the frame index and, when
// VK_EXT_memory_budget is in use, refreshes the cached budget once per frame.
17445 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
17447 m_CurrentFrameIndex.store(frameIndex);
17449 #if VMA_MEMORY_BUDGET
17450 if(m_UseExtMemoryBudget)
17452 UpdateVulkanBudget();
// VmaAllocator_T::MakePoolAllocationsLost — forwards to the pool's block vector,
// passing the current frame index; reports the number of allocations made lost.
17457 void VmaAllocator_T::MakePoolAllocationsLost(
17459 size_t* pLostAllocationCount)
17461 hPool->m_BlockVector.MakePoolAllocationsLost(
17462 m_CurrentFrameIndex.load(),
17463 pLostAllocationCount);
// VmaAllocator_T::CheckPoolCorruption — forwards to the pool's block vector.
17466 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
17468 return hPool->m_BlockVector.CheckCorruption();
// VmaAllocator_T::CheckCorruption — runs corruption checks over default block
// vectors and custom pools whose memory type is in memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT and upgrades to VK_SUCCESS when at least one
// vector actually supported the check; other results handled in truncated cases.
17471 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
17473 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default per-memory-type block vectors.
17476 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17478 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
17480 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
17481 VMA_ASSERT(pBlockVector);
17482 VkResult localRes = pBlockVector->CheckCorruption();
17485 case VK_ERROR_FEATURE_NOT_PRESENT:
17488 finalRes = VK_SUCCESS;
// Custom pools (guarded by the pools mutex).
17498 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
17499 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
17501 if(((1u << pool->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
17503 VkResult localRes = pool->m_BlockVector.CheckCorruption();
17506 case VK_ERROR_FEATURE_NOT_PRESENT:
17509 finalRes = VK_SUCCESS;
// VmaAllocator_T::CreateLostAllocation — creates an allocation object that is
// permanently in the "lost" state (frame index VMA_FRAME_INDEX_LOST).
17521 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
17523 *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST,
false);
17524 (*pAllocation)->InitLost();
// AtomicTransactionalIncrement<T> — transactional wrapper around an atomic
// counter increment: Increment() bumps the counter and remembers the atomic;
// Commit() clears the remembered pointer (m_Atomic = nullptr).
// NOTE(review): the destructor body is truncated in this extract — presumably it
// rolls back the increment when Commit() was not called; confirm upstream.
17528 template<
typename T>
17529 struct AtomicTransactionalIncrement
17532 typedef std::atomic<T> AtomicT;
17533 ~AtomicTransactionalIncrement()
// Returns the value BEFORE the increment (fetch_add semantics).
17538 T Increment(AtomicT* atomic)
17541 return m_Atomic->fetch_add(1);
// Commit: forget the atomic so the destructor leaves the increment in place.
17545 m_Atomic =
nullptr;
17549 AtomicT* m_Atomic =
nullptr;
// VmaAllocator_T::AllocateVulkanMemory — central vkAllocateMemory wrapper:
// enforces the optional max-allocation-count debug limit, enforces per-heap
// size limits via a CAS loop on m_Budget.m_BlockBytes, invokes the user's
// pfnAllocate callback on success, and commits the device-memory counter.
17552 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
// Transactionally count this allocation; rolled back unless Commit() is reached.
17554 AtomicTransactionalIncrement<uint32_t> deviceMemoryCountIncrement;
17555 const uint64_t prevDeviceMemoryCount = deviceMemoryCountIncrement.Increment(&m_DeviceMemoryCount);
17556 #if VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
17557 if(prevDeviceMemoryCount >= m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount)
17559 return VK_ERROR_TOO_MANY_OBJECTS;
17563 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Heap with a user-imposed size limit: reserve the bytes with a CAS loop so the
// limit is never exceeded under concurrency.
17566 if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
17568 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
17569 VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
17572 const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
17573 if(blockBytesAfterAllocation > heapSize)
17575 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
17577 if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
// Unlimited heap: simple atomic add.
17585 m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
17589 VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
17591 if(res == VK_SUCCESS)
17593 #if VMA_MEMORY_BUDGET
17594 ++m_Budget.m_OperationsSinceBudgetFetch;
// Inform the user's device-memory callback, if registered.
17598 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
17600 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.
pUserData);
17603 deviceMemoryCountIncrement.Commit();
// Failure: give the reserved bytes back to the heap accounting.
17607 m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
// VmaAllocator_T::FreeVulkanMemory — vkFreeMemory wrapper: notifies the user's
// pfnFree callback, frees the memory, and updates heap byte accounting and the
// device-memory counter.
17613 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
17616 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
17618 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.
pUserData);
17622 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
17624 m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
17626 --m_DeviceMemoryCount;
// VmaAllocator_T::BindVulkanBuffer — binds a buffer to memory. When a pNext
// chain is supplied it requires vkBindBufferMemory2KHR (Vulkan 1.1 or
// VK_KHR_bind_memory2); otherwise it returns VK_ERROR_EXTENSION_NOT_PRESENT.
// Without pNext it uses the plain vkBindBufferMemory.
17629 VkResult VmaAllocator_T::BindVulkanBuffer(
17630 VkDeviceMemory memory,
17631 VkDeviceSize memoryOffset,
17635 if(pNext != VMA_NULL)
17637 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17638 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17639 m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
17641 VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
17642 bindBufferMemoryInfo.pNext = pNext;
17643 bindBufferMemoryInfo.buffer = buffer;
17644 bindBufferMemoryInfo.memory = memory;
17645 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17646 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
// pNext chain requested but bind_memory2 unavailable.
17651 return VK_ERROR_EXTENSION_NOT_PRESENT;
17656 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
// VmaAllocator_T::BindVulkanImage — image counterpart of BindVulkanBuffer:
// uses vkBindImageMemory2KHR for pNext chains, plain vkBindImageMemory otherwise.
17660 VkResult VmaAllocator_T::BindVulkanImage(
17661 VkDeviceMemory memory,
17662 VkDeviceSize memoryOffset,
17666 if(pNext != VMA_NULL)
17668 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
17669 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
17670 m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
17672 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
17673 bindBufferMemoryInfo.pNext = pNext;
17674 bindBufferMemoryInfo.image = image;
17675 bindBufferMemoryInfo.memory = memory;
17676 bindBufferMemoryInfo.memoryOffset = memoryOffset;
17677 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
// pNext chain requested but bind_memory2 unavailable.
17682 return VK_ERROR_EXTENSION_NOT_PRESENT;
17687 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
// VmaAllocator_T::Map — maps an allocation: block allocations map the whole
// block and return the pointer offset by the allocation's offset; dedicated
// allocations delegate to DedicatedAllocMap. Lost-capable allocations cannot
// be mapped.
17691 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
17693 if(hAllocation->CanBecomeLost())
17695 return VK_ERROR_MEMORY_MAP_FAILED;
17698 switch(hAllocation->GetType())
17700 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17702 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17703 char *pBytes = VMA_NULL;
17704 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
17705 if(res == VK_SUCCESS)
// Offset the block mapping by this allocation's offset within the block.
17707 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
17708 hAllocation->BlockAllocMap();
17712 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17713 return hAllocation->DedicatedAllocMap(
this, ppData);
17716 return VK_ERROR_MEMORY_MAP_FAILED;
// NOTE(review): the function header is missing from this extract — by symmetry
// with Map above this is presumably VmaAllocator_T::Unmap; confirm upstream.
// Decrements the allocation's map count and unmaps the owning block or the
// dedicated memory.
17722 switch(hAllocation->GetType())
17724 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17726 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17727 hAllocation->BlockAllocUnmap();
17728 pBlock->Unmap(
this, 1);
17731 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17732 hAllocation->DedicatedAllocUnmap(
this);
// VmaAllocator_T::BindBufferMemory — binds a buffer to an allocation: dedicated
// allocations bind directly at the local offset; block allocations delegate to
// the block (which adds the allocation's offset within the block).
17739 VkResult VmaAllocator_T::BindBufferMemory(
17741 VkDeviceSize allocationLocalOffset,
17745 VkResult res = VK_SUCCESS;
17746 switch(hAllocation->GetType())
17748 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17749 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
17751 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17753 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
17754 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
17755 res = pBlock->BindBufferMemory(
this, hAllocation, allocationLocalOffset, hBuffer, pNext);
// VmaAllocator_T::BindImageMemory — image counterpart of BindBufferMemory.
17764 VkResult VmaAllocator_T::BindImageMemory(
17766 VkDeviceSize allocationLocalOffset,
17770 VkResult res = VK_SUCCESS;
17771 switch(hAllocation->GetType())
17773 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17774 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
17776 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17778 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
17779 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
17780 res = pBlock->BindImageMemory(
this, hAllocation, allocationLocalOffset, hImage, pNext);
// VmaAllocator_T::FlushOrInvalidateAllocation — computes the aligned mapped-memory
// range for the allocation (only needed for non-coherent memory) and issues either
// vkFlushMappedMemoryRanges or vkInvalidateMappedMemoryRanges.
17789 VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
17791 VkDeviceSize offset, VkDeviceSize size,
17792 VMA_CACHE_OPERATION op)
17794 VkResult res = VK_SUCCESS;
17796 VkMappedMemoryRange memRange = {};
// GetFlushOrInvalidateRange returns false for coherent memory — nothing to do.
17797 if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
17801 case VMA_CACHE_FLUSH:
17802 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
17804 case VMA_CACHE_INVALIDATE:
17805 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// VmaAllocator_T::FlushOrInvalidateAllocations — batched variant: collects the
// ranges that actually need the operation (non-coherent memory only) and issues
// a single vkFlush/vkInvalidate call for all of them. Null `offsets`/`sizes`
// default to 0 / VK_WHOLE_SIZE per allocation.
17815 VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
17816 uint32_t allocationCount,
17818 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
17819 VMA_CACHE_OPERATION op)
17821 typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
17822 typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
17823 RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
17825 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
17828 const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
17829 const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
17830 VkMappedMemoryRange newRange;
17831 if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
17833 ranges.push_back(newRange);
17837 VkResult res = VK_SUCCESS;
17838 if(!ranges.empty())
17842 case VMA_CACHE_FLUSH:
17843 res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
17845 case VMA_CACHE_INVALIDATE:
17846 res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
// VmaAllocator_T::FreeDedicatedMemory — removes the allocation from the per-type
// dedicated list (under its mutex) and frees the underlying VkDeviceMemory.
17856 void VmaAllocator_T::FreeDedicatedMemory(
const VmaAllocation allocation)
17858 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
17860 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17862 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
17863 DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
17864 dedicatedAllocations.Remove(allocation);
17867 VkDeviceMemory hMemory = allocation->GetMemory();
17879 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
17881 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits — creates a dummy
// buffer with the GPU-defragmentation usage flags, reads its memoryTypeBits,
// and destroys it. Returns 0 if buffer creation fails.
17884 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const
17886 VkBufferCreateInfo dummyBufCreateInfo;
17887 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
17889 uint32_t memoryTypeBits = 0;
17892 VkBuffer buf = VK_NULL_HANDLE;
17893 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
17894 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
17895 if(res == VK_SUCCESS)
17898 VkMemoryRequirements memReq;
17899 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
17900 memoryTypeBits = memReq.memoryTypeBits;
// The dummy buffer is only needed for the query — destroy it immediately.
17903 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
17906 return memoryTypeBits;
// VmaAllocator_T::CalculateGlobalMemoryTypeBits — returns the mask of memory
// types the allocator may use. AMD DEVICE_COHERENT memory types are excluded
// unless m_UseAmdDeviceCoherentMemory is set.
17909 uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits()
const
17912 VMA_ASSERT(GetMemoryTypeCount() > 0);
17914 uint32_t memoryTypeBits = UINT32_MAX;
17916 if(!m_UseAmdDeviceCoherentMemory)
// Exclude DEVICE_COHERENT_AMD memory types from the default mask.
17919 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
17921 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
17923 memoryTypeBits &= ~(1u << memTypeIndex);
17928 return memoryTypeBits;
// VmaAllocator_T::GetFlushOrInvalidateRange — computes a VkMappedMemoryRange
// aligned to nonCoherentAtomSize for the allocation. Returns true (via the
// truncated tail) only when size > 0 and the memory type is non-coherent;
// coherent memory needs no explicit flush/invalidate.
17931 bool VmaAllocator_T::GetFlushOrInvalidateRange(
17933 VkDeviceSize offset, VkDeviceSize size,
17934 VkMappedMemoryRange& outRange)
const
17936 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
17937 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
17939 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
17940 const VkDeviceSize allocationSize = allocation->GetSize();
17941 VMA_ASSERT(offset <= allocationSize);
17943 outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
17944 outRange.pNext = VMA_NULL;
17945 outRange.memory = allocation->GetMemory();
17947 switch(allocation->GetType())
// Dedicated: align down the offset, align up the size, clamp to allocation end.
17949 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
17950 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
17951 if(size == VK_WHOLE_SIZE)
17953 outRange.size = allocationSize - outRange.offset;
17957 VMA_ASSERT(offset + size <= allocationSize);
17958 outRange.size = VMA_MIN(
17959 VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
17960 allocationSize - outRange.offset);
// Block: same alignment in allocation-local coordinates, then translate by the
// allocation's offset within the block and clamp to the block size.
17963 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
17966 outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
17967 if(size == VK_WHOLE_SIZE)
17969 size = allocationSize - offset;
17973 VMA_ASSERT(offset + size <= allocationSize);
17975 outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);
17978 const VkDeviceSize allocationOffset = allocation->GetOffset();
17979 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
17980 const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
17981 outRange.offset += allocationOffset;
17982 outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
17994 #if VMA_MEMORY_BUDGET
// VmaAllocator_T::UpdateVulkanBudget — refetches per-heap usage/budget from the
// driver via VK_EXT_memory_budget, sanitizes the values (zero budget -> 80% of
// heap size; budget clamped to heap size; zero usage backfilled from our own
// block bytes), and resets the staleness counter.
17996 void VmaAllocator_T::UpdateVulkanBudget()
17998 VMA_ASSERT(m_UseExtMemoryBudget);
18000 VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };
18002 VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
18003 VmaPnextChainPushFront(&memProps, &budgetProps);
18005 GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
18008 VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
18010 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
18012 m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
18013 m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
18014 m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
// Some drivers report 0 — fall back to an 80%-of-heap estimate.
18017 if(m_Budget.m_VulkanBudget[heapIndex] == 0)
18019 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
18021 else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
18023 m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
18025 if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
18027 m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
18030 m_Budget.m_OperationsSinceBudgetFetch = 0;
// VmaAllocator_T::FillAllocation — debug helper: fills a HOST_VISIBLE,
// non-lost-capable allocation with the given byte pattern (map, memset,
// flush, unmap). Asserts if the memory cannot be mapped.
18036 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
18038 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
18039 !hAllocation->CanBecomeLost() &&
18040 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
18042 void* pData = VMA_NULL;
18043 VkResult res = Map(hAllocation, &pData);
18044 if(res == VK_SUCCESS)
18046 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
// Flush in case the memory type is non-coherent.
18047 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
18048 Unmap(hAllocation);
18052 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
18057 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
18059 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
18060 if(memoryTypeBits == UINT32_MAX)
18062 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
18063 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
18065 return memoryTypeBits;
18068 #if VMA_STATS_STRING_ENABLED
18070 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
18072 bool dedicatedAllocationsStarted =
false;
18073 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
18075 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
18076 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
18077 if(!dedicatedAllocList.IsEmpty())
18079 if(dedicatedAllocationsStarted ==
false)
18081 dedicatedAllocationsStarted =
true;
18082 json.WriteString(
"DedicatedAllocations");
18083 json.BeginObject();
18086 json.BeginString(
"Type ");
18087 json.ContinueString(memTypeIndex);
18093 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
18095 json.BeginObject(
true);
18096 alloc->PrintParameters(json);
18103 if(dedicatedAllocationsStarted)
18109 bool allocationsStarted =
false;
18110 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
18112 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
18114 if(allocationsStarted ==
false)
18116 allocationsStarted =
true;
18117 json.WriteString(
"DefaultPools");
18118 json.BeginObject();
18121 json.BeginString(
"Type ");
18122 json.ContinueString(memTypeIndex);
18125 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
18128 if(allocationsStarted)
18136 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
18137 if(!m_Pools.IsEmpty())
18139 json.WriteString(
"Pools");
18140 json.BeginObject();
18141 for(
VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
18143 json.BeginString();
18144 json.ContinueString(pool->GetId());
18147 pool->m_BlockVector.PrintDetailedMap(json);
18163 VMA_ASSERT(pCreateInfo && pAllocator);
18166 VMA_DEBUG_LOG(
"vmaCreateAllocator");
18168 return (*pAllocator)->Init(pCreateInfo);
18174 if(allocator != VK_NULL_HANDLE)
18176 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
18177 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
18178 vma_delete(&allocationCallbacks, allocator);
18184 VMA_ASSERT(allocator && pAllocatorInfo);
18185 pAllocatorInfo->
instance = allocator->m_hInstance;
18186 pAllocatorInfo->
physicalDevice = allocator->GetPhysicalDevice();
18187 pAllocatorInfo->
device = allocator->m_hDevice;
18192 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
18194 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
18195 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
18200 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
18202 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
18203 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
18208 uint32_t memoryTypeIndex,
18209 VkMemoryPropertyFlags* pFlags)
18211 VMA_ASSERT(allocator && pFlags);
18212 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
18213 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
18218 uint32_t frameIndex)
18220 VMA_ASSERT(allocator);
18221 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
18223 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18225 allocator->SetCurrentFrameIndex(frameIndex);
18232 VMA_ASSERT(allocator && pStats);
18233 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18234 allocator->CalculateStats(pStats);
18241 VMA_ASSERT(allocator && pBudget);
18242 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18243 allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
18246 #if VMA_STATS_STRING_ENABLED
18250 char** ppStatsString,
18251 VkBool32 detailedMap)
18253 VMA_ASSERT(allocator && ppStatsString);
18254 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18256 VmaStringBuilder sb(allocator);
18258 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
18259 json.BeginObject();
18262 allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());
18265 allocator->CalculateStats(&stats);
18267 json.WriteString(
"Total");
18268 VmaPrintStatInfo(json, stats.
total);
18270 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
18272 json.BeginString(
"Heap ");
18273 json.ContinueString(heapIndex);
18275 json.BeginObject();
18277 json.WriteString(
"Size");
18278 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
18280 json.WriteString(
"Flags");
18281 json.BeginArray(
true);
18282 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
18284 json.WriteString(
"DEVICE_LOCAL");
18288 json.WriteString(
"Budget");
18289 json.BeginObject();
18291 json.WriteString(
"BlockBytes");
18292 json.WriteNumber(budget[heapIndex].blockBytes);
18293 json.WriteString(
"AllocationBytes");
18294 json.WriteNumber(budget[heapIndex].allocationBytes);
18295 json.WriteString(
"Usage");
18296 json.WriteNumber(budget[heapIndex].usage);
18297 json.WriteString(
"Budget");
18298 json.WriteNumber(budget[heapIndex].budget);
18304 json.WriteString(
"Stats");
18305 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
18308 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
18310 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
18312 json.BeginString(
"Type ");
18313 json.ContinueString(typeIndex);
18316 json.BeginObject();
18318 json.WriteString(
"Flags");
18319 json.BeginArray(
true);
18320 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
18321 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
18323 json.WriteString(
"DEVICE_LOCAL");
18325 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
18327 json.WriteString(
"HOST_VISIBLE");
18329 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
18331 json.WriteString(
"HOST_COHERENT");
18333 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
18335 json.WriteString(
"HOST_CACHED");
18337 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
18339 json.WriteString(
"LAZILY_ALLOCATED");
18341 #if VMA_VULKAN_VERSION >= 1001000
18342 if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
18344 json.WriteString(
"PROTECTED");
18347 #if VK_AMD_device_coherent_memory
18348 if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
18350 json.WriteString(
"DEVICE_COHERENT");
18352 if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
18354 json.WriteString(
"DEVICE_UNCACHED");
18361 json.WriteString(
"Stats");
18362 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
18371 if(detailedMap == VK_TRUE)
18373 allocator->PrintDetailedMap(json);
18379 const size_t len = sb.GetLength();
18380 char*
const pChars = vma_new_array(allocator,
char, len + 1);
18383 memcpy(pChars, sb.GetData(), len);
18385 pChars[len] =
'\0';
18386 *ppStatsString = pChars;
18391 char* pStatsString)
18393 if(pStatsString != VMA_NULL)
18395 VMA_ASSERT(allocator);
18396 size_t len = strlen(pStatsString);
18397 vma_delete_array(allocator, pStatsString, len + 1);
18408 uint32_t memoryTypeBits,
18410 uint32_t* pMemoryTypeIndex)
18412 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18413 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18414 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18416 memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();
18423 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
18424 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
18425 uint32_t notPreferredFlags = 0;
18428 switch(pAllocationCreateInfo->
usage)
18433 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18435 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18439 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
18442 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18443 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
18445 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18449 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
18450 preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
18453 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
18456 requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
18465 (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
18467 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
18470 *pMemoryTypeIndex = UINT32_MAX;
18471 uint32_t minCost = UINT32_MAX;
18472 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
18473 memTypeIndex < allocator->GetMemoryTypeCount();
18474 ++memTypeIndex, memTypeBit <<= 1)
18477 if((memTypeBit & memoryTypeBits) != 0)
18479 const VkMemoryPropertyFlags currFlags =
18480 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
18482 if((requiredFlags & ~currFlags) == 0)
18485 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
18486 VmaCountBitsSet(currFlags & notPreferredFlags);
18488 if(currCost < minCost)
18490 *pMemoryTypeIndex = memTypeIndex;
18495 minCost = currCost;
18500 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
18505 const VkBufferCreateInfo* pBufferCreateInfo,
18507 uint32_t* pMemoryTypeIndex)
18509 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18510 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
18511 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18512 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18514 const VkDevice hDev = allocator->m_hDevice;
18515 VkBuffer hBuffer = VK_NULL_HANDLE;
18516 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
18517 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
18518 if(res == VK_SUCCESS)
18520 VkMemoryRequirements memReq = {};
18521 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
18522 hDev, hBuffer, &memReq);
18526 memReq.memoryTypeBits,
18527 pAllocationCreateInfo,
18530 allocator->GetVulkanFunctions().vkDestroyBuffer(
18531 hDev, hBuffer, allocator->GetAllocationCallbacks());
18538 const VkImageCreateInfo* pImageCreateInfo,
18540 uint32_t* pMemoryTypeIndex)
18542 VMA_ASSERT(allocator != VK_NULL_HANDLE);
18543 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
18544 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
18545 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
18547 const VkDevice hDev = allocator->m_hDevice;
18548 VkImage hImage = VK_NULL_HANDLE;
18549 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
18550 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
18551 if(res == VK_SUCCESS)
18553 VkMemoryRequirements memReq = {};
18554 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
18555 hDev, hImage, &memReq);
18559 memReq.memoryTypeBits,
18560 pAllocationCreateInfo,
18563 allocator->GetVulkanFunctions().vkDestroyImage(
18564 hDev, hImage, allocator->GetAllocationCallbacks());
18574 VMA_ASSERT(allocator && pCreateInfo && pPool);
18576 VMA_DEBUG_LOG(
"vmaCreatePool");
18578 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18580 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
18582 #if VMA_RECORDING_ENABLED
18583 if(allocator->GetRecorder() != VMA_NULL)
18585 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
18596 VMA_ASSERT(allocator);
18598 if(pool == VK_NULL_HANDLE)
18603 VMA_DEBUG_LOG(
"vmaDestroyPool");
18605 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18607 #if VMA_RECORDING_ENABLED
18608 if(allocator->GetRecorder() != VMA_NULL)
18610 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
18614 allocator->DestroyPool(pool);
18622 VMA_ASSERT(allocator && pool && pPoolStats);
18624 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18626 allocator->GetPoolStats(pool, pPoolStats);
18632 size_t* pLostAllocationCount)
18634 VMA_ASSERT(allocator && pool);
18636 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18638 #if VMA_RECORDING_ENABLED
18639 if(allocator->GetRecorder() != VMA_NULL)
18641 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
18645 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
18650 VMA_ASSERT(allocator && pool);
18652 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18654 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
18656 return allocator->CheckPoolCorruption(pool);
18662 const char** ppName)
18664 VMA_ASSERT(allocator && pool && ppName);
18666 VMA_DEBUG_LOG(
"vmaGetPoolName");
18668 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18670 *ppName = pool->GetName();
18678 VMA_ASSERT(allocator && pool);
18680 VMA_DEBUG_LOG(
"vmaSetPoolName");
18682 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18684 pool->SetName(pName);
18686 #if VMA_RECORDING_ENABLED
18687 if(allocator->GetRecorder() != VMA_NULL)
18689 allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
18696 const VkMemoryRequirements* pVkMemoryRequirements,
18701 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
18703 VMA_DEBUG_LOG(
"vmaAllocateMemory");
18705 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18707 VkResult result = allocator->AllocateMemory(
18708 *pVkMemoryRequirements,
18715 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18719 #if VMA_RECORDING_ENABLED
18720 if(allocator->GetRecorder() != VMA_NULL)
18722 allocator->GetRecorder()->RecordAllocateMemory(
18723 allocator->GetCurrentFrameIndex(),
18724 *pVkMemoryRequirements,
18730 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18732 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18740 const VkMemoryRequirements* pVkMemoryRequirements,
18742 size_t allocationCount,
18746 if(allocationCount == 0)
18751 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
18753 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
18755 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18757 VkResult result = allocator->AllocateMemory(
18758 *pVkMemoryRequirements,
18765 VMA_SUBALLOCATION_TYPE_UNKNOWN,
18769 #if VMA_RECORDING_ENABLED
18770 if(allocator->GetRecorder() != VMA_NULL)
18772 allocator->GetRecorder()->RecordAllocateMemoryPages(
18773 allocator->GetCurrentFrameIndex(),
18774 *pVkMemoryRequirements,
18776 (uint64_t)allocationCount,
18781 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
18783 for(
size_t i = 0; i < allocationCount; ++i)
18785 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
18799 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18801 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
18803 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18805 VkMemoryRequirements vkMemReq = {};
18806 bool requiresDedicatedAllocation =
false;
18807 bool prefersDedicatedAllocation =
false;
18808 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
18809 requiresDedicatedAllocation,
18810 prefersDedicatedAllocation);
18812 VkResult result = allocator->AllocateMemory(
18814 requiresDedicatedAllocation,
18815 prefersDedicatedAllocation,
18820 VMA_SUBALLOCATION_TYPE_BUFFER,
18824 #if VMA_RECORDING_ENABLED
18825 if(allocator->GetRecorder() != VMA_NULL)
18827 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
18828 allocator->GetCurrentFrameIndex(),
18830 requiresDedicatedAllocation,
18831 prefersDedicatedAllocation,
18837 if(pAllocationInfo && result == VK_SUCCESS)
18839 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18852 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
18854 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
18856 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18858 VkMemoryRequirements vkMemReq = {};
18859 bool requiresDedicatedAllocation =
false;
18860 bool prefersDedicatedAllocation =
false;
18861 allocator->GetImageMemoryRequirements(image, vkMemReq,
18862 requiresDedicatedAllocation, prefersDedicatedAllocation);
18864 VkResult result = allocator->AllocateMemory(
18866 requiresDedicatedAllocation,
18867 prefersDedicatedAllocation,
18872 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
18876 #if VMA_RECORDING_ENABLED
18877 if(allocator->GetRecorder() != VMA_NULL)
18879 allocator->GetRecorder()->RecordAllocateMemoryForImage(
18880 allocator->GetCurrentFrameIndex(),
18882 requiresDedicatedAllocation,
18883 prefersDedicatedAllocation,
18889 if(pAllocationInfo && result == VK_SUCCESS)
18891 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
18901 VMA_ASSERT(allocator);
18903 if(allocation == VK_NULL_HANDLE)
18908 VMA_DEBUG_LOG(
"vmaFreeMemory");
18910 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18912 #if VMA_RECORDING_ENABLED
18913 if(allocator->GetRecorder() != VMA_NULL)
18915 allocator->GetRecorder()->RecordFreeMemory(
18916 allocator->GetCurrentFrameIndex(),
18921 allocator->FreeMemory(
18928 size_t allocationCount,
18931 if(allocationCount == 0)
18936 VMA_ASSERT(allocator);
18938 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
18940 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18942 #if VMA_RECORDING_ENABLED
18943 if(allocator->GetRecorder() != VMA_NULL)
18945 allocator->GetRecorder()->RecordFreeMemoryPages(
18946 allocator->GetCurrentFrameIndex(),
18947 (uint64_t)allocationCount,
18952 allocator->FreeMemory(allocationCount, pAllocations);
18960 VMA_ASSERT(allocator && allocation && pAllocationInfo);
18962 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18964 #if VMA_RECORDING_ENABLED
18965 if(allocator->GetRecorder() != VMA_NULL)
18967 allocator->GetRecorder()->RecordGetAllocationInfo(
18968 allocator->GetCurrentFrameIndex(),
18973 allocator->GetAllocationInfo(allocation, pAllocationInfo);
18980 VMA_ASSERT(allocator && allocation);
18982 VMA_DEBUG_GLOBAL_MUTEX_LOCK
18984 #if VMA_RECORDING_ENABLED
18985 if(allocator->GetRecorder() != VMA_NULL)
18987 allocator->GetRecorder()->RecordTouchAllocation(
18988 allocator->GetCurrentFrameIndex(),
18993 return allocator->TouchAllocation(allocation);
19001 VMA_ASSERT(allocator && allocation);
19003 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19005 allocation->SetUserData(allocator, pUserData);
19007 #if VMA_RECORDING_ENABLED
19008 if(allocator->GetRecorder() != VMA_NULL)
19010 allocator->GetRecorder()->RecordSetAllocationUserData(
19011 allocator->GetCurrentFrameIndex(),
19022 VMA_ASSERT(allocator && pAllocation);
19024 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
19026 allocator->CreateLostAllocation(pAllocation);
19028 #if VMA_RECORDING_ENABLED
19029 if(allocator->GetRecorder() != VMA_NULL)
19031 allocator->GetRecorder()->RecordCreateLostAllocation(
19032 allocator->GetCurrentFrameIndex(),
19043 VMA_ASSERT(allocator && allocation && ppData);
19045 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19047 VkResult res = allocator->Map(allocation, ppData);
19049 #if VMA_RECORDING_ENABLED
19050 if(allocator->GetRecorder() != VMA_NULL)
19052 allocator->GetRecorder()->RecordMapMemory(
19053 allocator->GetCurrentFrameIndex(),
19065 VMA_ASSERT(allocator && allocation);
19067 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19069 #if VMA_RECORDING_ENABLED
19070 if(allocator->GetRecorder() != VMA_NULL)
19072 allocator->GetRecorder()->RecordUnmapMemory(
19073 allocator->GetCurrentFrameIndex(),
19078 allocator->Unmap(allocation);
19083 VMA_ASSERT(allocator && allocation);
19085 VMA_DEBUG_LOG(
"vmaFlushAllocation");
19087 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19089 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
19091 #if VMA_RECORDING_ENABLED
19092 if(allocator->GetRecorder() != VMA_NULL)
19094 allocator->GetRecorder()->RecordFlushAllocation(
19095 allocator->GetCurrentFrameIndex(),
19096 allocation, offset, size);
19105 VMA_ASSERT(allocator && allocation);
19107 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
19109 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19111 const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
19113 #if VMA_RECORDING_ENABLED
19114 if(allocator->GetRecorder() != VMA_NULL)
19116 allocator->GetRecorder()->RecordInvalidateAllocation(
19117 allocator->GetCurrentFrameIndex(),
19118 allocation, offset, size);
19127 uint32_t allocationCount,
19129 const VkDeviceSize* offsets,
19130 const VkDeviceSize* sizes)
19132 VMA_ASSERT(allocator);
19134 if(allocationCount == 0)
19139 VMA_ASSERT(allocations);
19141 VMA_DEBUG_LOG(
"vmaFlushAllocations");
19143 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19145 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);
19147 #if VMA_RECORDING_ENABLED
19148 if(allocator->GetRecorder() != VMA_NULL)
19159 uint32_t allocationCount,
19161 const VkDeviceSize* offsets,
19162 const VkDeviceSize* sizes)
19164 VMA_ASSERT(allocator);
19166 if(allocationCount == 0)
19171 VMA_ASSERT(allocations);
19173 VMA_DEBUG_LOG(
"vmaInvalidateAllocations");
19175 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19177 const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);
19179 #if VMA_RECORDING_ENABLED
19180 if(allocator->GetRecorder() != VMA_NULL)
19191 VMA_ASSERT(allocator);
19193 VMA_DEBUG_LOG(
"vmaCheckCorruption");
19195 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19197 return allocator->CheckCorruption(memoryTypeBits);
19203 size_t allocationCount,
19204 VkBool32* pAllocationsChanged,
19214 if(pDefragmentationInfo != VMA_NULL)
19228 if(res == VK_NOT_READY)
19241 VMA_ASSERT(allocator && pInfo && pContext);
19252 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
19254 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
19256 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19258 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
19260 #if VMA_RECORDING_ENABLED
19261 if(allocator->GetRecorder() != VMA_NULL)
19263 allocator->GetRecorder()->RecordDefragmentationBegin(
19264 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
19275 VMA_ASSERT(allocator);
19277 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
19279 if(context != VK_NULL_HANDLE)
19281 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19283 #if VMA_RECORDING_ENABLED
19284 if(allocator->GetRecorder() != VMA_NULL)
19286 allocator->GetRecorder()->RecordDefragmentationEnd(
19287 allocator->GetCurrentFrameIndex(), context);
19291 return allocator->DefragmentationEnd(context);
19305 VMA_ASSERT(allocator);
19308 VMA_DEBUG_LOG(
"vmaBeginDefragmentationPass");
19310 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19312 if(context == VK_NULL_HANDLE)
19318 return allocator->DefragmentationPassBegin(pInfo, context);
19324 VMA_ASSERT(allocator);
19326 VMA_DEBUG_LOG(
"vmaEndDefragmentationPass");
19327 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19329 if(context == VK_NULL_HANDLE)
19332 return allocator->DefragmentationPassEnd(context);
19340 VMA_ASSERT(allocator && allocation && buffer);
19342 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
19344 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19346 return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
19352 VkDeviceSize allocationLocalOffset,
19356 VMA_ASSERT(allocator && allocation && buffer);
19358 VMA_DEBUG_LOG(
"vmaBindBufferMemory2");
19360 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19362 return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
19370 VMA_ASSERT(allocator && allocation && image);
19372 VMA_DEBUG_LOG(
"vmaBindImageMemory");
19374 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19376 return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
19382 VkDeviceSize allocationLocalOffset,
19386 VMA_ASSERT(allocator && allocation && image);
19388 VMA_DEBUG_LOG(
"vmaBindImageMemory2");
19390 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19392 return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
19397 const VkBufferCreateInfo* pBufferCreateInfo,
19403 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
19405 if(pBufferCreateInfo->size == 0)
19407 return VK_ERROR_VALIDATION_FAILED_EXT;
19409 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
19410 !allocator->m_UseKhrBufferDeviceAddress)
19412 VMA_ASSERT(0 &&
"Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
19413 return VK_ERROR_VALIDATION_FAILED_EXT;
19416 VMA_DEBUG_LOG(
"vmaCreateBuffer");
19418 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19420 *pBuffer = VK_NULL_HANDLE;
19421 *pAllocation = VK_NULL_HANDLE;
19424 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
19425 allocator->m_hDevice,
19427 allocator->GetAllocationCallbacks(),
19432 VkMemoryRequirements vkMemReq = {};
19433 bool requiresDedicatedAllocation =
false;
19434 bool prefersDedicatedAllocation =
false;
19435 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
19436 requiresDedicatedAllocation, prefersDedicatedAllocation);
19439 res = allocator->AllocateMemory(
19441 requiresDedicatedAllocation,
19442 prefersDedicatedAllocation,
19444 pBufferCreateInfo->usage,
19446 *pAllocationCreateInfo,
19447 VMA_SUBALLOCATION_TYPE_BUFFER,
19451 #if VMA_RECORDING_ENABLED
19452 if(allocator->GetRecorder() != VMA_NULL)
19454 allocator->GetRecorder()->RecordCreateBuffer(
19455 allocator->GetCurrentFrameIndex(),
19456 *pBufferCreateInfo,
19457 *pAllocationCreateInfo,
19467 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
19472 #if VMA_STATS_STRING_ENABLED
19473 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
19475 if(pAllocationInfo != VMA_NULL)
19477 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19482 allocator->FreeMemory(
19485 *pAllocation = VK_NULL_HANDLE;
19486 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19487 *pBuffer = VK_NULL_HANDLE;
19490 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
19491 *pBuffer = VK_NULL_HANDLE;
19502 VMA_ASSERT(allocator);
19504 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19509 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
19511 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19513 #if VMA_RECORDING_ENABLED
19514 if(allocator->GetRecorder() != VMA_NULL)
19516 allocator->GetRecorder()->RecordDestroyBuffer(
19517 allocator->GetCurrentFrameIndex(),
19522 if(buffer != VK_NULL_HANDLE)
19524 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
19527 if(allocation != VK_NULL_HANDLE)
19529 allocator->FreeMemory(
19537 const VkImageCreateInfo* pImageCreateInfo,
19543 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
19545 if(pImageCreateInfo->extent.width == 0 ||
19546 pImageCreateInfo->extent.height == 0 ||
19547 pImageCreateInfo->extent.depth == 0 ||
19548 pImageCreateInfo->mipLevels == 0 ||
19549 pImageCreateInfo->arrayLayers == 0)
19551 return VK_ERROR_VALIDATION_FAILED_EXT;
19554 VMA_DEBUG_LOG(
"vmaCreateImage");
19556 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19558 *pImage = VK_NULL_HANDLE;
19559 *pAllocation = VK_NULL_HANDLE;
19562 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
19563 allocator->m_hDevice,
19565 allocator->GetAllocationCallbacks(),
19569 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
19570 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
19571 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
19574 VkMemoryRequirements vkMemReq = {};
19575 bool requiresDedicatedAllocation =
false;
19576 bool prefersDedicatedAllocation =
false;
19577 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
19578 requiresDedicatedAllocation, prefersDedicatedAllocation);
19580 res = allocator->AllocateMemory(
19582 requiresDedicatedAllocation,
19583 prefersDedicatedAllocation,
19587 *pAllocationCreateInfo,
19592 #if VMA_RECORDING_ENABLED
19593 if(allocator->GetRecorder() != VMA_NULL)
19595 allocator->GetRecorder()->RecordCreateImage(
19596 allocator->GetCurrentFrameIndex(),
19598 *pAllocationCreateInfo,
19608 res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
19613 #if VMA_STATS_STRING_ENABLED
19614 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
19616 if(pAllocationInfo != VMA_NULL)
19618 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
19623 allocator->FreeMemory(
19626 *pAllocation = VK_NULL_HANDLE;
19627 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19628 *pImage = VK_NULL_HANDLE;
19631 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
19632 *pImage = VK_NULL_HANDLE;
19643 VMA_ASSERT(allocator);
19645 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
19650 VMA_DEBUG_LOG(
"vmaDestroyImage");
19652 VMA_DEBUG_GLOBAL_MUTEX_LOCK
19654 #if VMA_RECORDING_ENABLED
19655 if(allocator->GetRecorder() != VMA_NULL)
19657 allocator->GetRecorder()->RecordDestroyImage(
19658 allocator->GetCurrentFrameIndex(),
19663 if(image != VK_NULL_HANDLE)
19665 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
19667 if(allocation != VK_NULL_HANDLE)
19669 allocator->FreeMemory(
Definition: vk_mem_alloc.h:2900
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2926
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2932
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2918
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2939
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2913
float priority
A floating-point value between 0 and 1, indicating the priority of the allocation relative to other m...
Definition: vk_mem_alloc.h:2946
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2908
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2902
Represents single memory allocation.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:3267
VkDeviceSize offset
Offset in VkDeviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:3291
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:3311
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:3272
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:3302
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:3316
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:3281
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:2422
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:2427
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2453
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:2478
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:2424
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null.
Definition: vk_mem_alloc.h:2484
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:2436
VkInstance instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:2496
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:2433
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:2491
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:2430
uint32_t vulkanApiVersion
Optional. The highest version of Vulkan that the application is designed to use.
Definition: vk_mem_alloc.h:2505
const VkExternalMemoryHandleTypeFlagsKHR * pTypeExternalMemoryHandleTypes
Either null or a pointer to an array of external memory handle types for each Vulkan memory type.
Definition: vk_mem_alloc.h:2516
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:2439
Represents main object of this library initialized.
Information about existing VmaAllocator object.
Definition: vk_mem_alloc.h:2532
VkDevice device
Handle to Vulkan device object.
Definition: vk_mem_alloc.h:2547
VkInstance instance
Handle to Vulkan instance object.
Definition: vk_mem_alloc.h:2537
VkPhysicalDevice physicalDevice
Handle to Vulkan physical device object.
Definition: vk_mem_alloc.h:2542
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
Definition: vk_mem_alloc.h:2638
VkDeviceSize blockBytes
Sum size of all VkDeviceMemory blocks allocated from particular heap, in bytes.
Definition: vk_mem_alloc.h:2641
VkDeviceSize allocationBytes
Sum size of all allocations created in particular heap, in bytes.
Definition: vk_mem_alloc.h:2652
VkDeviceSize usage
Estimated current memory usage of the program, in bytes.
Definition: vk_mem_alloc.h:2662
VkDeviceSize budget
Estimated amount of memory available to the program, in bytes.
Definition: vk_mem_alloc.h:2673
Represents Opaque object that represents started defragmentation process.
Parameters for defragmentation.
Definition: vk_mem_alloc.h:3666
const VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:3706
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:3672
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:3726
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:3721
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:3669
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:3687
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:3690
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:3735
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:3716
const VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:3681
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:3711
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:3757
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:3767
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:3762
Parameters for incremental defragmentation steps.
Definition: vk_mem_alloc.h:3748
uint32_t moveCount
Definition: vk_mem_alloc.h:3749
VmaDefragmentationPassMoveInfo * pMoves
Definition: vk_mem_alloc.h:3750
Definition: vk_mem_alloc.h:3738
VkDeviceMemory memory
Definition: vk_mem_alloc.h:3740
VkDeviceSize offset
Definition: vk_mem_alloc.h:3741
VmaAllocation allocation
Definition: vk_mem_alloc.h:3739
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:3771
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:3779
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:3773
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:3775
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:3777
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:2231
void * pUserData
Optional, can be null.
Definition: vk_mem_alloc.h:2237
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:2233
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:2235
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:3068
float priority
A floating-point value between 0 and 1, indicating the priority of the allocations in this pool relat...
Definition: vk_mem_alloc.h:3116
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:3071
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:3074
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:3110
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:3083
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:3088
VkDeviceSize minAllocationAlignment
Additional minimum alignment to be used for all allocations created from this pool....
Definition: vk_mem_alloc.h:3123
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:3096
void * pMemoryAllocateNext
Additional pNext chain to be attached to VkMemoryAllocateInfo used for every allocation made by this ...
Definition: vk_mem_alloc.h:3133
Represents custom memory pool.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:3138
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:3141
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:3160
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:3157
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:3147
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:3144
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:3150
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:2407
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:2417
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:2409
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:2599
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:2610
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:2610
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:2609
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:2611
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:2603
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:2611
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:2607
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:2601
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:2610
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:2605
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:2611
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2616
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:2618
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:2617
VmaStatInfo total
Definition: vk_mem_alloc.h:2619
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:2361
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:2371
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:2376
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:2364
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:2368
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:2373
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:2365
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:2372
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:2369
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:2363
PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:2362
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:2375
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:2377
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:2370
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:2366
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:2367
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:2378
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:2374
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size, void *pUserData)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:2217
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
struct VmaAllocatorInfo VmaAllocatorInfo
Information about existing VmaAllocator object.
VkResult vmaEndDefragmentationPass(VmaAllocator allocator, VmaDefragmentationContext context)
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:2029
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
struct VmaStats VmaStats
General statistics from current state of Allocator.
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:3064
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:2393
@ VMA_RECORD_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2401
@ VMA_RECORD_FLUSH_AFTER_CALL_BIT
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:2399
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:2241
@ VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT
Definition: vk_mem_alloc.h:2316
@ VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:2246
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT
Definition: vk_mem_alloc.h:2298
@ VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT
Definition: vk_mem_alloc.h:2334
@ VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT
Definition: vk_mem_alloc.h:2286
@ VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:2271
@ VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2353
@ VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT
Definition: vk_mem_alloc.h:2351
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2897
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
void vmaFreeMemory(VmaAllocator allocator, const VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:3656
@ VMA_DEFRAGMENTATION_FLAG_INCREMENTAL
Definition: vk_mem_alloc.h:3657
@ VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:3658
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
struct VmaDefragmentationPassInfo VmaDefragmentationPassInfo
Parameters for incremental defragmentation steps.
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size, void *pUserData)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:2210
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, const VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:3660
VkResult vmaBindBufferMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkBuffer buffer, const void *pNext)
Binds buffer to allocation with additional parameters.
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:3008
@ VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:3043
@ VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:3062
@ VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:3054
@ VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:3026
@ VMA_POOL_CREATE_ALGORITHM_MASK
Definition: vk_mem_alloc.h:3058
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkResult vmaDefragment(VmaAllocator allocator, const VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
struct VmaBudget VmaBudget
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
VmaMemoryUsage
Definition: vk_mem_alloc.h:2721
@ VMA_MEMORY_USAGE_MAX_ENUM
Definition: vk_mem_alloc.h:2784
@ VMA_MEMORY_USAGE_CPU_ONLY
Definition: vk_mem_alloc.h:2752
@ VMA_MEMORY_USAGE_CPU_COPY
Definition: vk_mem_alloc.h:2774
@ VMA_MEMORY_USAGE_GPU_TO_CPU
Definition: vk_mem_alloc.h:2768
@ VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED
Definition: vk_mem_alloc.h:2782
@ VMA_MEMORY_USAGE_CPU_TO_GPU
Definition: vk_mem_alloc.h:2759
@ VMA_MEMORY_USAGE_GPU_ONLY
Definition: vk_mem_alloc.h:2742
@ VMA_MEMORY_USAGE_UNKNOWN
Definition: vk_mem_alloc.h:2725
VkResult vmaBindImageMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkImage image, const void *pNext)
Binds image to allocation with additional parameters.
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VkResult vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
VkResult vmaInvalidateAllocations(VmaAllocator allocator, uint32_t allocationCount, const VmaAllocation *allocations, const VkDeviceSize *offsets, const VkDeviceSize *sizes)
Invalidates memory of given set of allocations.
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
VkResult vmaBeginDefragmentationPass(VmaAllocator allocator, VmaDefragmentationContext context, VmaDefragmentationPassInfo *pInfo)
VkResult vmaFlushAllocations(VmaAllocator allocator, uint32_t allocationCount, const VmaAllocation *allocations, const VkDeviceSize *offsets, const VkDeviceSize *sizes)
Flushes memory of given set of allocations.
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:2355
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
struct VmaDefragmentationPassMoveInfo VmaDefragmentationPassMoveInfo
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2788
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT
Definition: vk_mem_alloc.h:2883
@ VMA_ALLOCATION_CREATE_MAPPED_BIT
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2819
@ VMA_ALLOCATION_CREATE_DONT_BIND_BIT
Definition: vk_mem_alloc.h:2856
@ VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT
Definition: vk_mem_alloc.h:2876
@ VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2795
@ VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT
Definition: vk_mem_alloc.h:2850
@ VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT
Definition: vk_mem_alloc.h:2832
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT
Definition: vk_mem_alloc.h:2886
@ VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT
Definition: vk_mem_alloc.h:2839
@ VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT
Definition: vk_mem_alloc.h:2865
@ VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2806
@ VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT
Definition: vk_mem_alloc.h:2880
@ VMA_ALLOCATION_CREATE_STRATEGY_MASK
Definition: vk_mem_alloc.h:2890
@ VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT
Definition: vk_mem_alloc.h:2845
@ VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT
Definition: vk_mem_alloc.h:2860
@ VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT
Definition: vk_mem_alloc.h:2869
@ VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM
Definition: vk_mem_alloc.h:2895
void vmaSetPoolName(VmaAllocator allocator, VmaPool pool, const char *pName)
Sets name of a custom pool.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
void vmaGetBudget(VmaAllocator allocator, VmaBudget *pBudget)
Retrieves information about current memory budget for all memory heaps.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
void vmaGetPoolName(VmaAllocator allocator, VmaPool pool, const char **ppName)
Retrieves name of a custom pool.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:2403
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
void vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo *pAllocatorInfo)
Returns information about existing VmaAllocator object - handle to Vulkan device etc.