#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#if !defined(NOMINMAX) && defined(VMA_IMPLEMENTATION)
    #define NOMINMAX // For windows.h
#endif
#if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
    extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
    extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
    extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    extern PFN_vkAllocateMemory vkAllocateMemory;
    extern PFN_vkFreeMemory vkFreeMemory;
    extern PFN_vkMapMemory vkMapMemory;
    extern PFN_vkUnmapMemory vkUnmapMemory;
    extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
    extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
    extern PFN_vkBindBufferMemory vkBindBufferMemory;
    extern PFN_vkBindImageMemory vkBindImageMemory;
    extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    extern PFN_vkCreateBuffer vkCreateBuffer;
    extern PFN_vkDestroyBuffer vkDestroyBuffer;
    extern PFN_vkCreateImage vkCreateImage;
    extern PFN_vkDestroyImage vkDestroyImage;
    extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
    #if VMA_VULKAN_VERSION >= 1001000
        extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
        extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
        extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
        extern PFN_vkBindImageMemory2 vkBindImageMemory2;
        extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
    #endif // #if VMA_VULKAN_VERSION >= 1001000
#endif // #if defined(__ANDROID__) && VMA_STATIC_VULKAN_FUNCTIONS && VK_NO_PROTOTYPES
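/*
Illustrative sketch (not part of the original header): with VK_NO_PROTOTYPES on
Android, an application has to fill the globals declared above itself, e.g. by
loading libvulkan.so and resolving entry points through vkGetInstanceProcAddr.
The function name MyLoadVulkanEntryPoints below is hypothetical.

    #include <dlfcn.h>

    static bool MyLoadVulkanEntryPoints(VkInstance instance)
    {
        void* const lib = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL);
        if(lib == nullptr)
            return false;
        vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)dlsym(lib, "vkGetInstanceProcAddr");
        if(vkGetInstanceProcAddr == nullptr)
            return false;
        // Each remaining global is then resolved the same way, for example:
        vkAllocateMemory = (PFN_vkAllocateMemory)vkGetInstanceProcAddr(instance, "vkAllocateMemory");
        vkFreeMemory = (PFN_vkFreeMemory)vkGetInstanceProcAddr(instance, "vkFreeMemory");
        return true;
    }
*/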
#include <vulkan/vulkan.h>

// Define this macro to declare maximum supported Vulkan version in format AAABBBCCC,
// e.g. 1002000 = Vulkan 1.2. By default it is defined as highest available version, based on the SDK headers.
#if !defined(VMA_VULKAN_VERSION)
    #if defined(VK_VERSION_1_2)
        #define VMA_VULKAN_VERSION 1002000
    #elif defined(VK_VERSION_1_1)
        #define VMA_VULKAN_VERSION 1001000
    #else
        #define VMA_VULKAN_VERSION 1000000
    #endif
#endif

#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif

// Defined to 1 when VK_KHR_buffer_device_address device extension or equivalent core Vulkan 1.2 feature is defined in its headers.
#if !defined(VMA_BUFFER_DEVICE_ADDRESS)
    #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
        #define VMA_BUFFER_DEVICE_ADDRESS 1
    #else
        #define VMA_BUFFER_DEVICE_ADDRESS 0
    #endif
#endif
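/*
Illustrative sketch (not from the original header): each switch above only sets
a default, so a user can pin a value before including this file, e.g. to force
the Vulkan 1.0 code paths even when the SDK headers declare VK_VERSION_1_2:

    #define VMA_VULKAN_VERSION 1000000 // Treat target as Vulkan 1.0.
    #define VMA_DEDICATED_ALLOCATION 0
    #include "vk_mem_alloc.h"
*/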
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif

#ifndef VMA_LEN_IF_NOT_NULL
    #define VMA_LEN_IF_NOT_NULL(len)
#endif
#ifndef VMA_NULLABLE
    #ifdef __clang__
        #define VMA_NULLABLE _Nullable
    #else
        #define VMA_NULLABLE
    #endif
#endif

#ifndef VMA_NOT_NULL
    #ifdef __clang__
        #define VMA_NOT_NULL _Nonnull
    #else
        #define VMA_NOT_NULL
    #endif
#endif

#ifndef VMA_NOT_NULL_NON_DISPATCHABLE
    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
        #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
    #else
        #define VMA_NOT_NULL_NON_DISPATCHABLE
    #endif
#endif

#ifndef VMA_NULLABLE_NON_DISPATCHABLE
    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
        #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
    #else
        #define VMA_NULLABLE_NON_DISPATCHABLE
    #endif
#endif
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR* PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryType,
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
    VkDeviceSize size,
    void* VMA_NULLABLE pUserData);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR* PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryType,
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
    VkDeviceSize size,
    void* VMA_NULLABLE pUserData);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pHeapSizeLimit;
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t frameIndex);
#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as a null-terminated string in JSON format.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator VMA_NOT_NULL allocator,
    char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator VMA_NOT_NULL allocator,
    char* VMA_NULLABLE pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
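/*
Illustrative usage sketch (assuming an already created VmaAllocator `allocator`):

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // detailedMap = VK_TRUE
    printf("%s\n", statsString); // JSON dump of heaps, types, and allocations
    vmaFreeStatsString(allocator, statsString); // must be freed through the same allocator
*/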
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
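/*
Illustrative usage sketch (assuming `allocator` exists): the *ForBufferInfo
variant queries memory requirements from a temporary internal buffer, so the
caller never has to create a VkBuffer just to pick a memory type:

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    uint32_t memTypeIndex = 0;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
        allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);
*/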
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator VMA_NOT_NULL allocator,
    const VmaPoolCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    size_t* VMA_NULLABLE pLostAllocationCount);

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    const char* VMA_NULLABLE * VMA_NOT_NULL ppName);

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    const char* VMA_NULLABLE pName);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
    const VmaAllocationCreateInfo* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pCreateInfo,
    size_t allocationCount,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
    VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator VMA_NOT_NULL allocator,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator VMA_NOT_NULL allocator,
    size_t allocationCount,
    const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);

/// Deprecated.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize newSize);
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    void* VMA_NULLABLE pUserData);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    void* VMA_NULLABLE * VMA_NOT_NULL ppData);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize offset,
    VkDeviceSize size);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize offset,
    VkDeviceSize size);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t allocationCount,
    const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t allocationCount,
    const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
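/*
Illustrative usage sketch (assuming `allocator` and a host-visible `allocation`):
for memory types without HOST_COHERENT, writes made through a mapped pointer must
be flushed before the GPU can safely read them:

    void* mapped = VMA_NULL;
    if(vmaMapMemory(allocator, allocation, &mapped) == VK_SUCCESS)
    {
        memcpy(mapped, myData, myDataSize);
        vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);
        vmaUnmapMemory(allocator, allocation);
    }
*/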
    /// Member of VmaDefragmentationInfo2: either null, or a pointer to poolCount pools to be defragmented.
    const VmaPool VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(poolCount) pPools;

    /// Member of VmaDefragmentationPassMoveInfo: destination memory of one defragmentation move.
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory;
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator VMA_NOT_NULL allocator,
    const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
    size_t allocationCount,
    VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
    const VmaDefragmentationInfo* VMA_NULLABLE pDefragmentationInfo,
    VmaDefragmentationStats* VMA_NULLABLE pDefragmentationStats);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
    const void* VMA_NULLABLE pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
    const void* VMA_NULLABLE pNext);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
    VmaAllocation VMA_NULLABLE allocation);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator VMA_NOT_NULL allocator,
    VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
    VmaAllocation VMA_NULLABLE allocation);
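/*
Illustrative usage sketch of the create/destroy pair above (assuming `allocator`
exists):

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
        &buffer, &allocation, VMA_NULL);
    // ... use the buffer ...
    vmaDestroyBuffer(allocator, buffer, allocation); // frees both buffer and its memory
*/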
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <utility>

#if VMA_RECORDING_ENABLED
    #include <chrono>
    #if defined(_WIN32)
        #include <windows.h>
    #endif
#endif
#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

#if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
    #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
    #if defined(VK_NO_PROTOTYPES)
        extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
        extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
    #endif
#endif
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif

#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <cassert>   // for assert
#include <algorithm> // for min, max
#include <mutex>
#ifndef VMA_NULL
    // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    #define VMA_NULL   nullptr
#endif
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib>
static void* vma_aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib>

#if defined(__APPLE__)
#include <AvailabilityMacros.h>
#endif

static void* vma_aligned_alloc(size_t alignment, size_t size)
{
#if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0))
#if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0
    // aligned_alloc() is only declared by Apple SDKs that know about macOS 11 / iOS 14,
    // but the function itself is already available since macOS 10.15 / iOS 13,
    // hence the runtime __builtin_available check below.
    if (__builtin_available(macOS 10.15, iOS 13, *))
        return aligned_alloc(alignment, size);
#endif
#endif
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void* pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#elif defined(_WIN32)
static void* vma_aligned_alloc(size_t alignment, size_t size)
{
    return _aligned_malloc(size, alignment);
}
#else
static void* vma_aligned_alloc(size_t alignment, size_t size)
{
    return aligned_alloc(alignment, size);
}
#endif

#if defined(_WIN32)
static void vma_aligned_free(void* ptr)
{
    _aligned_free(ptr);
}
#else
static void vma_aligned_free(void* ptr)
{
    free(ptr);
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef NDEBUG
        #define VMA_ASSERT(expr)
    #else
        #define VMA_ASSERT(expr)         assert(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef NDEBUG
        #define VMA_HEAVY_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
    #endif
#endif
#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type)       (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
#endif

#ifndef VMA_SYSTEM_ALIGNED_FREE
    // VMA_SYSTEM_FREE is the old name, but might have been defined by the user.
    #if defined(VMA_SYSTEM_FREE)
        #define VMA_SYSTEM_ALIGNED_FREE(ptr)   VMA_SYSTEM_FREE(ptr)
    #else
        #define VMA_SYSTEM_ALIGNED_FREE(ptr)   vma_aligned_free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif
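/*
Illustrative sketch (not from the original lines above): because every macro in
this section is wrapped in #ifndef, a user can inject custom behavior by defining
the macro before the VMA_IMPLEMENTATION section is compiled, e.g. to route
VMA_DEBUG_LOG to printf:

    #define VMA_DEBUG_LOG(format, ...) do { \
        printf(format, __VA_ARGS__); \
        printf("\n"); \
    } while(false)
    #define VMA_IMPLEMENTATION
    #include "vk_mem_alloc.h"
*/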
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
        bool TryLock() { return m_Mutex.try_lock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif
// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            bool TryLockRead() { return m_Mutex.try_lock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
            bool TryLockWrite() { return m_Mutex.try_lock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        // Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
            bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            bool TryLockRead() { return m_Mutex.TryLock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
            bool TryLockWrite() { return m_Mutex.TryLock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif
#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation will have its own memory block. Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes. Set to more than 1 for debugging purposes only.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes. Set nonzero for debugging purposes only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define this macro to 1 to automatically fill new allocations and destroyed allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define this macro to 1 together with a non-zero VMA_DEBUG_MARGIN to enable
    // writing a magic value to the margin before and after every allocation and
    // validating it, so that out-of-bounds writes are detected.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Set this to 1 for debugging purposes only, to enable a single mutex protecting all entry calls to the library.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity. Set to more than 1 for debugging purposes only.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif
#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif
static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN, little-endian binary 66 E6 84 7F.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

// Copies of some Vulkan definitions so we don't need to check their existence in the code.
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >>  2) & 0x33333333) + (c & 0x33333333);
    c = ((c >>  4) + c) & 0x0F0F0F0F;
    c = ((c >>  8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
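// Worked example (added sketch): for v = 0b1011 (11), the first step computes
// c = 11 - (0b0101 & 0x55555555) = 6 = 0b0110, i.e. the 1-counts of each bit pair
// packed in place (low pair holds 2, high pair holds 1). The following shift-and-mask
// steps sum those partial counts into ever wider fields until the full popcount (3)
// ends up in the low bits.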
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}

// Aligns given value up to nearest multiple of alignment. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    return (val + alignment - 1) & ~(alignment - 1);
}

// Aligns given value down to nearest multiple of alignment. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    return val & ~(alignment - 1);
}
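// Worked example (added sketch): with alignment = 8 (a power of 2),
// VmaAlignUp(13, 8) = (13 + 7) & ~7 = 20 & ~7 = 16, and
// VmaAlignDown(13, 8) = 13 & ~7 = 8. The bit trick is only valid because the
// assert above guarantees alignment is a power of 2.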
// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}
#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
/*
Returns true if two memory blocks occupy overlapping pages.
ResourceA must be in less memory offset than ResourceB.
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
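// Worked example (added sketch): with pageSize = 4096, a resource at offset 0
// with size 4000 ends at byte 3999, which lies on page 0. A second resource
// starting at offset 4000 also starts on page 0, so the function returns true
// (same page); if it instead started at offset 4096, it would start on page 1
// and the function would return false.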
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
or linear image and another one is optimal image. If type is unknown, behave
conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#else
    // no-op
#endif
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif
// Minimum size of a free suballocation to register it in the collection of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection or place where
new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
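/*
Illustrative usage sketch (hypothetical data): both helpers require the range to
be sorted according to cmp. VmaBinaryFindFirstNotLess behaves like a lower_bound;
VmaBinaryFindSorted additionally rejects a result that is not equivalent to the key:

    VkDeviceSize arr[] = { 16, 64, 256, 1024 };
    struct Less { bool operator()(VkDeviceSize a, VkDeviceSize b) const { return a < b; } };
    VkDeviceSize* p = VmaBinaryFindFirstNotLess(arr, arr + 4, (VkDeviceSize)100, Less());
    // p points to 256: the first element not less than 100.
*/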
/*
Returns true if all pointers in the array are not-null and unique.
Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
T must be pointer type, e.g. VmaAllocation, VmaPool.
*/
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    void* result = VMA_NULL;
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        result = (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
    VMA_ASSERT(result != VMA_NULL && "CPU memory allocation failed.");
    return result;
}
static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_ALIGNED_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}

static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator that routes all allocations through VkAllocationCallbacks.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR
/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
    // value is unused.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};

template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
/*
Vector optimized for the case when the array is small: it keeps up to N elements
in a static (in-place) array and only switches to a heap-allocated VmaVector when
the element count exceeds N.
*/
template<typename T, typename AllocatorT, size_t N>
class VmaSmallVector
{
public:
    typedef T value_type;

    VmaSmallVector(const AllocatorT& allocator) :
        m_Count(0),
        m_DynamicArray(allocator)
    {
    }
    VmaSmallVector(size_t count, const AllocatorT& allocator) :
        m_Count(count),
        m_DynamicArray(count > N ? count : 0, allocator)
    {
    }
    template<typename SrcT, typename SrcAllocatorT, size_t SrcN>
    VmaSmallVector(const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) = delete;
    template<typename SrcT, typename SrcAllocatorT, size_t SrcN>
    VmaSmallVector<T, AllocatorT, N>& operator=(const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) = delete;

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
    const T* data() const { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return data()[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return data()[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[m_Count - 1];
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        if(newCount > N && m_Count > N)
        {
            // Any direction, staying in m_DynamicArray.
            m_DynamicArray.resize(newCount, freeMemory);
        }
        else if(newCount > N && m_Count <= N)
        {
            // Growing, moving from m_StaticArray to m_DynamicArray.
            m_DynamicArray.resize(newCount, freeMemory);
            if(m_Count > 0)
            {
                memcpy(m_DynamicArray.data(), m_StaticArray, m_Count * sizeof(T));
            }
        }
        else if(newCount <= N && m_Count > N)
        {
            // Shrinking, moving from m_DynamicArray to m_StaticArray.
            if(newCount > 0)
            {
                memcpy(m_StaticArray, m_DynamicArray.data(), newCount * sizeof(T));
            }
            m_DynamicArray.resize(0, freeMemory);
        }
        else
        {
            // Any direction, staying in m_StaticArray - nothing to do here.
        }
        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        m_DynamicArray.clear(freeMemory);
        m_Count = 0;
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        T* const dataPtr = data();
        if(index < oldCount)
        {
            memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) * sizeof(T));
        }
        dataPtr[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            T* const dataPtr = data();
            memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        data()[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return data(); }
    iterator end() { return data() + m_Count; }

private:
    size_t m_Count;
    T m_StaticArray[N]; // Used when m_Count <= N.
    VmaVector<T, AllocatorT> m_DynamicArray; // Used when m_Count > N.
};
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    template<typename... Types> T* Alloc(Types... args);
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
            return result;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
    return result;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            ptr->~T(); // Explicit destructor call.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
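/*
Illustrative usage sketch (hypothetical type MyNode): blocks grow geometrically
(factor 3/2) and free slots within a block form an intrusive singly-linked list,
so Alloc/Free normally avoid a heap call per object:

    struct MyNode { uint32_t value; MyNode(uint32_t v) : value(v) {} };
    VmaPoolAllocator<MyNode> pool(VMA_NULL, 32); // first block holds 32 items
    MyNode* n = pool.Alloc(7u); // placement-new with forwarded args
    pool.Free(n);               // runs destructor, returns slot to the free list
*/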
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};

template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
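/*
Illustrative usage sketch: VmaMap keeps its pairs sorted by key inside a VmaVector,
so find() is a binary search rather than a hash lookup:

    VmaMap<uint32_t, VkDeviceSize> map(VmaStlAllocator< VmaPair<uint32_t, VkDeviceSize> >(VMA_NULL));
    map.insert(VmaPair<uint32_t, VkDeviceSize>(2u, 4096ull));
    VmaPair<uint32_t, VkDeviceSize>* it = map.find(2u); // != map.end() if present
*/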
class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };

struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment{1},
        m_Size{0},
        m_pUserData{VMA_NULL},
        m_LastUseFrameIndex{currentFrameIndex},
        m_MemoryTypeIndex{0},
        m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
        m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
        m_MapCount{0},
        m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
    {
#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    If LastUseFrameIndex + frameInUseCount < currentFrameIndex, makes the
    allocation lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and
    returns true. Otherwise returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif
};
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned to an
allocation or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparator for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

/*
Parameters of planned allocation inside a VmaBlockMetadata.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    // Shouldn't modify blockCount.
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags.
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request must already be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};

#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
6467 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
6469 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
6472 virtual ~VmaBlockMetadata_Generic();
6473 virtual void Init(VkDeviceSize size);
6475 virtual bool Validate()
const;
6476 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
6477 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
6478 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6479 virtual bool IsEmpty()
const;
6481 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6482 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6484 #if VMA_STATS_STRING_ENABLED
6485 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
6488 virtual bool CreateAllocationRequest(
6489 uint32_t currentFrameIndex,
6490 uint32_t frameInUseCount,
6491 VkDeviceSize bufferImageGranularity,
6492 VkDeviceSize allocSize,
6493 VkDeviceSize allocAlignment,
6495 VmaSuballocationType allocType,
6496 bool canMakeOtherLost,
6498 VmaAllocationRequest* pAllocationRequest);
6500 virtual bool MakeRequestedAllocationsLost(
6501 uint32_t currentFrameIndex,
6502 uint32_t frameInUseCount,
6503 VmaAllocationRequest* pAllocationRequest);
6505 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6507 virtual VkResult CheckCorruption(const void* pBlockData);
virtual void Alloc(
6510 const VmaAllocationRequest& request,
6511 VmaSuballocationType type,
6512 VkDeviceSize allocSize,
VmaAllocation hAllocation);
6516 virtual void FreeAtOffset(VkDeviceSize offset);
6521 bool IsBufferImageGranularityConflictPossible(
6522 VkDeviceSize bufferImageGranularity,
6523 VmaSuballocationType& inOutPrevSuballocType) const;
6526 friend class VmaDefragmentationAlgorithm_Generic;
6527 friend class VmaDefragmentationAlgorithm_Fast;
6529 uint32_t m_FreeCount;
6530 VkDeviceSize m_SumFreeSize;
6531 VmaSuballocationList m_Suballocations;
6534 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
6536 bool ValidateFreeSuballocationList() const;
6540 bool CheckAllocation(
6541 uint32_t currentFrameIndex,
6542 uint32_t frameInUseCount,
6543 VkDeviceSize bufferImageGranularity,
6544 VkDeviceSize allocSize,
6545 VkDeviceSize allocAlignment,
6546 VmaSuballocationType allocType,
6547 VmaSuballocationList::const_iterator suballocItem,
6548 bool canMakeOtherLost,
6549 VkDeviceSize* pOffset,
6550 size_t* itemsToMakeLostCount,
6551 VkDeviceSize* pSumFreeSize,
6552 VkDeviceSize* pSumItemSize) const;
6554 void MergeFreeWithNext(VmaSuballocationList::iterator item);
6558 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
6561 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
6564 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
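// Illustrative sketch (not part of VMA) of the invariant behind
// m_FreeSuballocationsBySize above: iterators to free suballocations of size
// >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are kept sorted by size, so a
// best-fit lookup is a binary search for the first entry not less than the
// request - the same "first not less" contract as VmaBinaryFindFirstNotLess()
// used by CreateAllocationRequest() further below.
static inline const VkDeviceSize* VmaFirstNotLess_Sketch(
    const VkDeviceSize* beg, const VkDeviceSize* end, VkDeviceSize key)
{
    size_t count = (size_t)(end - beg);
    while(count > 0)
    {
        size_t step = count / 2;
        const VkDeviceSize* mid = beg + step;
        if(*mid < key) { beg = mid + 1; count -= step + 1; } // discard lower half
        else           { count = step; }                     // keep mid as candidate
    }
    return beg; // == end when every registered free range is too small
}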
6645 class VmaBlockMetadata_Linear : public VmaBlockMetadata
6647 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
6650 virtual ~VmaBlockMetadata_Linear();
6651 virtual void Init(VkDeviceSize size);
6653 virtual bool Validate() const;
6654 virtual size_t GetAllocationCount() const;
6655 virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
6656 virtual VkDeviceSize GetUnusedRangeSizeMax() const;
6657 virtual bool IsEmpty() const { return GetAllocationCount() == 0; }
6659 virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
6660 virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
6662 #if VMA_STATS_STRING_ENABLED
6663 virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
6666 virtual bool CreateAllocationRequest(
6667 uint32_t currentFrameIndex,
6668 uint32_t frameInUseCount,
6669 VkDeviceSize bufferImageGranularity,
6670 VkDeviceSize allocSize,
6671 VkDeviceSize allocAlignment,
6673 VmaSuballocationType allocType,
6674 bool canMakeOtherLost,
6676 VmaAllocationRequest* pAllocationRequest);
6678 virtual bool MakeRequestedAllocationsLost(
6679 uint32_t currentFrameIndex,
6680 uint32_t frameInUseCount,
6681 VmaAllocationRequest* pAllocationRequest);
6683 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6685 virtual VkResult CheckCorruption(const void* pBlockData);
virtual void Alloc(
6688 const VmaAllocationRequest& request,
6689 VmaSuballocationType type,
6690 VkDeviceSize allocSize,
VmaAllocation hAllocation);
6694 virtual void FreeAtOffset(VkDeviceSize offset);
6704 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
6706 enum SECOND_VECTOR_MODE
6708 SECOND_VECTOR_EMPTY,
6713 SECOND_VECTOR_RING_BUFFER,
6719 SECOND_VECTOR_DOUBLE_STACK,
6722 VkDeviceSize m_SumFreeSize;
6723 SuballocationVectorType m_Suballocations0, m_Suballocations1;
6724 uint32_t m_1stVectorIndex;
6725 SECOND_VECTOR_MODE m_2ndVectorMode;
6727 SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
6728 SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
6729 const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
6730 const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
6733 size_t m_1stNullItemsBeginCount;
6735 size_t m_1stNullItemsMiddleCount;
6737 size_t m_2ndNullItemsCount;
6739 bool ShouldCompact1st() const;
6740 void CleanupAfterFree();
6742 bool CreateAllocationRequest_LowerAddress(
6743 uint32_t currentFrameIndex,
6744 uint32_t frameInUseCount,
6745 VkDeviceSize bufferImageGranularity,
6746 VkDeviceSize allocSize,
6747 VkDeviceSize allocAlignment,
6748 VmaSuballocationType allocType,
6749 bool canMakeOtherLost,
6751 VmaAllocationRequest* pAllocationRequest);
6752 bool CreateAllocationRequest_UpperAddress(
6753 uint32_t currentFrameIndex,
6754 uint32_t frameInUseCount,
6755 VkDeviceSize bufferImageGranularity,
6756 VkDeviceSize allocSize,
6757 VkDeviceSize allocAlignment,
6758 VmaSuballocationType allocType,
6759 bool canMakeOtherLost,
6761 VmaAllocationRequest* pAllocationRequest);
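// Illustrative sketch (not part of VMA) of the SECOND_VECTOR_MODE strategies
// above, using plain offsets instead of suballocation vectors: in
// SECOND_VECTOR_DOUBLE_STACK the 1st vector grows up from offset 0 while the
// 2nd grows down from the end of the block; the two allocation paths declared
// above (lower/upper address) correspond to these two directions.
struct VmaLinearDoubleStack_Sketch
{
    VkDeviceSize blockSize;
    VkDeviceSize top1st;     // first free byte above the lower stack
    VkDeviceSize bottom2nd;  // first used byte of the upper stack

    explicit VmaLinearDoubleStack_Sketch(VkDeviceSize size)
        : blockSize(size), top1st(0), bottom2nd(size) { }

    bool AllocLowerAddress(VkDeviceSize size) // grows the 1st vector upwards
    {
        if(top1st + size > bottom2nd)
            return false; // would collide with the upper stack
        top1st += size;
        return true;
    }
    bool AllocUpperAddress(VkDeviceSize size) // grows the 2nd vector downwards
    {
        if(bottom2nd < size || bottom2nd - size < top1st)
            return false; // would collide with the lower stack
        bottom2nd -= size;
        return true;
    }
};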
6775 class VmaBlockMetadata_Buddy : public VmaBlockMetadata
6777 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
6780 virtual ~VmaBlockMetadata_Buddy();
6781 virtual void Init(VkDeviceSize size);
6783 virtual bool Validate() const;
6784 virtual size_t GetAllocationCount() const { return m_AllocationCount; }
6785 virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
6786 virtual VkDeviceSize GetUnusedRangeSizeMax() const;
6787 virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }
6789 virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
6790 virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
6792 #if VMA_STATS_STRING_ENABLED
6793 virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
6796 virtual bool CreateAllocationRequest(
6797 uint32_t currentFrameIndex,
6798 uint32_t frameInUseCount,
6799 VkDeviceSize bufferImageGranularity,
6800 VkDeviceSize allocSize,
6801 VkDeviceSize allocAlignment,
6803 VmaSuballocationType allocType,
6804 bool canMakeOtherLost,
6806 VmaAllocationRequest* pAllocationRequest);
6808 virtual bool MakeRequestedAllocationsLost(
6809 uint32_t currentFrameIndex,
6810 uint32_t frameInUseCount,
6811 VmaAllocationRequest* pAllocationRequest);
6813 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
6815 virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }
virtual void Alloc(
6818 const VmaAllocationRequest& request,
6819 VmaSuballocationType type,
6820 VkDeviceSize allocSize,
VmaAllocation hAllocation);
6823 virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
6824 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
6827 static const VkDeviceSize MIN_NODE_SIZE = 32;
6828 static const size_t MAX_LEVELS = 30;
6830 struct ValidationContext
6832 size_t calculatedAllocationCount;
6833 size_t calculatedFreeCount;
6834 VkDeviceSize calculatedSumFreeSize;
6836 ValidationContext() :
6837 calculatedAllocationCount(0),
6838 calculatedFreeCount(0),
6839 calculatedSumFreeSize(0) { }
6844 VkDeviceSize offset;
6874 VkDeviceSize m_UsableSize;
6875 uint32_t m_LevelCount;
6881 } m_FreeList[MAX_LEVELS];
6883 size_t m_AllocationCount;
6887 VkDeviceSize m_SumFreeSize;
6889 VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
6890 void DeleteNode(Node* node);
6891 bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
6892 uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
6893 inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
6895 void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
6896 void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
6900 void AddToFreeListFront(uint32_t level, Node* node);
6904 void RemoveFromFreeList(uint32_t level, Node* node);
6906 #if VMA_STATS_STRING_ENABLED
6907 void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
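// Illustrative sketch (not part of VMA) of the buddy-level arithmetic above: a
// node at level L has size m_UsableSize >> L (LevelToNodeSize), so mapping an
// allocation to a level means descending while the child node, half the current
// size, would still fit the request.
static inline uint32_t VmaAllocSizeToLevel_Sketch(
    VkDeviceSize usableSize, uint32_t levelCount, VkDeviceSize allocSize)
{
    uint32_t level = 0;
    // Stop before exceeding the deepest level or shrinking below the request.
    while(level + 1 < levelCount && (usableSize >> (level + 1)) >= allocSize)
        ++level;
    return level;
}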
6917 class VmaDeviceMemoryBlock
6919 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
6921 VmaBlockMetadata* m_pMetadata;
6925 ~VmaDeviceMemoryBlock()
6927 VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
6928 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6935 uint32_t newMemoryTypeIndex,
6936 VkDeviceMemory newMemory,
6937 VkDeviceSize newSize,
6939 uint32_t algorithm);
6943 VmaPool GetParentPool() const { return m_hParentPool; }
6944 VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
6945 uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
6946 uint32_t GetId() const { return m_Id; }
6947 void* GetMappedData() const { return m_pMappedData; }
6950 bool Validate() const;
6955 VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
6958 VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
6959 VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
6961 VkResult BindBufferMemory(
6964 VkDeviceSize allocationLocalOffset,
6967 VkResult BindImageMemory(
6970 VkDeviceSize allocationLocalOffset,
6976 uint32_t m_MemoryTypeIndex;
6978 VkDeviceMemory m_hMemory;
6986 uint32_t m_MapCount;
6987 void* m_pMappedData;
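// Illustrative sketch (not part of VMA): m_MapCount above makes Map()/Unmap()
// reference-counted. Vulkan allows at most one active vkMapMemory per
// VkDeviceMemory, so the real Map() maps only on the 0 -> 1 transition and
// Unmap() unmaps only on 1 -> 0; the hypothetical DoMap/DoUnmap below stand in
// for the Vulkan calls to keep the sketch self-contained.
struct VmaRefCountedMapping_Sketch
{
    uint32_t m_MapCount = 0;
    void* m_pMappedData = nullptr;

    void* Map()
    {
        if(m_MapCount++ == 0)
            m_pMappedData = DoMap(); // would be vkMapMemory in the real code
        return m_pMappedData;
    }
    void Unmap()
    {
        if(m_MapCount > 0 && --m_MapCount == 0)
        {
            DoUnmap();               // would be vkUnmapMemory in the real code
            m_pMappedData = nullptr;
        }
    }
    void* DoMap() { return this; }   // placeholder mapping
    void DoUnmap() { }
};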
6990 struct VmaPointerLess
6992 bool operator()(const void* lhs, const void* rhs) const
return lhs < rhs;
6998 struct VmaDefragmentationMove
7000 size_t srcBlockIndex;
7001 size_t dstBlockIndex;
7002 VkDeviceSize srcOffset;
7003 VkDeviceSize dstOffset;
7006 VmaDeviceMemoryBlock* pSrcBlock;
7007 VmaDeviceMemoryBlock* pDstBlock;
7010 class VmaDefragmentationAlgorithm;
7018 struct VmaBlockVector
7020 VMA_CLASS_NO_COPY(VmaBlockVector)
7025 uint32_t memoryTypeIndex,
7026 VkDeviceSize preferredBlockSize,
7027 size_t minBlockCount,
7028 size_t maxBlockCount,
7029 VkDeviceSize bufferImageGranularity,
7030 uint32_t frameInUseCount,
7031 bool explicitBlockSize,
7032 uint32_t algorithm);
7035 VkResult CreateMinBlocks();
7037 VmaAllocator GetAllocator() const { return m_hAllocator; }
7038 VmaPool GetParentPool() const { return m_hParentPool; }
7039 bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
7040 uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
7041 VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
7042 VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
7043 uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
7044 uint32_t GetAlgorithm() const { return m_Algorithm; }
7049 bool IsCorruptionDetectionEnabled() const;
7052 uint32_t currentFrameIndex,
7054 VkDeviceSize alignment,
7056 VmaSuballocationType suballocType,
7057 size_t allocationCount,
7065 #if VMA_STATS_STRING_ENABLED
7066 void PrintDetailedMap(class VmaJsonWriter& json);
7069 void MakePoolAllocationsLost(
7070 uint32_t currentFrameIndex,
7071 size_t* pLostAllocationCount);
7072 VkResult CheckCorruption();
7076 class VmaBlockVectorDefragmentationContext* pCtx,
7078 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
7079 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
7080 VkCommandBuffer commandBuffer);
7081 void DefragmentationEnd(
7082 class VmaBlockVectorDefragmentationContext* pCtx,
7086 uint32_t ProcessDefragmentations(
7087 class VmaBlockVectorDefragmentationContext *pCtx,
7090 void CommitDefragmentations(
7091 class VmaBlockVectorDefragmentationContext *pCtx,
7097 size_t GetBlockCount() const { return m_Blocks.size(); }
7098 VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
7099 size_t CalcAllocationCount() const;
7100 bool IsBufferImageGranularityConflictPossible() const;
7103 friend class VmaDefragmentationAlgorithm_Generic;
7107 const uint32_t m_MemoryTypeIndex;
7108 const VkDeviceSize m_PreferredBlockSize;
7109 const size_t m_MinBlockCount;
7110 const size_t m_MaxBlockCount;
7111 const VkDeviceSize m_BufferImageGranularity;
7112 const uint32_t m_FrameInUseCount;
7113 const bool m_ExplicitBlockSize;
7114 const uint32_t m_Algorithm;
7115 VMA_RW_MUTEX m_Mutex;
7119 bool m_HasEmptyBlock;
7121 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
7122 uint32_t m_NextBlockId;
7124 VkDeviceSize CalcMaxBlockSize() const;
7127 void Remove(VmaDeviceMemoryBlock* pBlock);
7131 void IncrementallySortBlocks();
7133 VkResult AllocatePage(
7134 uint32_t currentFrameIndex,
7136 VkDeviceSize alignment,
7138 VmaSuballocationType suballocType,
7142 VkResult AllocateFromBlock(
7143 VmaDeviceMemoryBlock* pBlock,
7144 uint32_t currentFrameIndex,
7146 VkDeviceSize alignment,
7149 VmaSuballocationType suballocType,
7153 VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
7156 void ApplyDefragmentationMovesCpu(
7157 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7158 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
7160 void ApplyDefragmentationMovesGpu(
7161 class VmaBlockVectorDefragmentationContext* pDefragCtx,
7162 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7163 VkCommandBuffer commandBuffer);
7171 void UpdateHasEmptyBlock();
7176 VMA_CLASS_NO_COPY(VmaPool_T)
7178 VmaBlockVector m_BlockVector;
7183 VkDeviceSize preferredBlockSize);
7186 uint32_t GetId() const { return m_Id; }
7187 void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
7189 const char* GetName() const { return m_Name; }
7190 void SetName(const char* pName);
7192 #if VMA_STATS_STRING_ENABLED
7208 class VmaDefragmentationAlgorithm
7210 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
7212 VmaDefragmentationAlgorithm(
7214 VmaBlockVector* pBlockVector,
7215 uint32_t currentFrameIndex) :
7216 m_hAllocator(hAllocator),
7217 m_pBlockVector(pBlockVector),
7218 m_CurrentFrameIndex(currentFrameIndex)
7221 virtual ~VmaDefragmentationAlgorithm()
7225 virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
7226 virtual void AddAll() = 0;
7228 virtual VkResult Defragment(
7229 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7230 VkDeviceSize maxBytesToMove,
7231 uint32_t maxAllocationsToMove,
7234 virtual VkDeviceSize GetBytesMoved() const = 0;
7235 virtual uint32_t GetAllocationsMoved() const = 0;
7239 VmaBlockVector* const m_pBlockVector;
7240 const uint32_t m_CurrentFrameIndex;
7242 struct AllocationInfo
7245 VkBool32* m_pChanged;
7248 m_hAllocation(VK_NULL_HANDLE),
7249 m_pChanged(VMA_NULL)
7253 m_hAllocation(hAlloc),
7254 m_pChanged(pChanged)
7260 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
7262 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
7264 VmaDefragmentationAlgorithm_Generic(
7266 VmaBlockVector* pBlockVector,
7267 uint32_t currentFrameIndex,
7268 bool overlappingMoveSupported);
7269 virtual ~VmaDefragmentationAlgorithm_Generic();
7271 virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
7272 virtual void AddAll() { m_AllAllocations = true; }
7274 virtual VkResult Defragment(
7275 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7276 VkDeviceSize maxBytesToMove,
7277 uint32_t maxAllocationsToMove,
7280 virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
7281 virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
7284 uint32_t m_AllocationCount;
7285 bool m_AllAllocations;
7287 VkDeviceSize m_BytesMoved;
7288 uint32_t m_AllocationsMoved;
7290 struct AllocationInfoSizeGreater
7292 bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
7294 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
7298 struct AllocationInfoOffsetGreater
7300 bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
7302 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
7308 size_t m_OriginalBlockIndex;
7309 VmaDeviceMemoryBlock* m_pBlock;
7310 bool m_HasNonMovableAllocations;
7311 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
7313 BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
7314 m_OriginalBlockIndex(SIZE_MAX),
7316 m_HasNonMovableAllocations(true),
7317 m_Allocations(pAllocationCallbacks)
7321 void CalcHasNonMovableAllocations()
7323 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
7324 const size_t defragmentAllocCount = m_Allocations.size();
7325 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
7328 void SortAllocationsBySizeDescending()
7330 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
7333 void SortAllocationsByOffsetDescending()
7335 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
7339 struct BlockPointerLess
7341 bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
7343 return pLhsBlockInfo->m_pBlock < pRhsBlock;
7345 bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
7347 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
7353 struct BlockInfoCompareMoveDestination
7355 bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
7357 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
7361 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
7365 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
7373 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
7374 BlockInfoVector m_Blocks;
7376 VkResult DefragmentRound(
7377 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7378 VkDeviceSize maxBytesToMove,
7379 uint32_t maxAllocationsToMove,
7380 bool freeOldAllocations);
7382 size_t CalcBlocksWithNonMovableCount()
const;
7384 static bool MoveMakesSense(
7385 size_t dstBlockIndex, VkDeviceSize dstOffset,
7386 size_t srcBlockIndex, VkDeviceSize srcOffset);
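// Illustrative sketch (not part of VMA, but matching the declaration above): a
// defragmentation move only "makes sense" if it transports the allocation
// strictly downwards - to an earlier block, or to a lower offset within the
// same block - so successive rounds provably converge instead of shuffling
// data sideways.
static inline bool VmaMoveMakesSense_Sketch(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex != srcBlockIndex)
        return dstBlockIndex < srcBlockIndex; // earlier block wins
    return dstOffset < srcOffset;             // same block: lower offset wins
}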
7389 class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
7391 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
7393 VmaDefragmentationAlgorithm_Fast(
7395 VmaBlockVector* pBlockVector,
7396 uint32_t currentFrameIndex,
7397 bool overlappingMoveSupported);
7398 virtual ~VmaDefragmentationAlgorithm_Fast();
7400 virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
7401 virtual void AddAll() { m_AllAllocations = true; }
7403 virtual VkResult Defragment(
7404 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
7405 VkDeviceSize maxBytesToMove,
7406 uint32_t maxAllocationsToMove,
7409 virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
7410 virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
7415 size_t origBlockIndex;
7418 class FreeSpaceDatabase
7424 s.blockInfoIndex = SIZE_MAX;
7425 for(size_t i = 0; i < MAX_COUNT; ++i)
7427 m_FreeSpaces[i] = s;
7431 void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
7433 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7439 size_t bestIndex = SIZE_MAX;
7440 for(size_t i = 0; i < MAX_COUNT; ++i)
7443 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
7448 if(m_FreeSpaces[i].size < size &&
7449 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
7455 if(bestIndex != SIZE_MAX)
7457 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
7458 m_FreeSpaces[bestIndex].offset = offset;
7459 m_FreeSpaces[bestIndex].size = size;
7463 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
7464 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
7466 size_t bestIndex = SIZE_MAX;
7467 VkDeviceSize bestFreeSpaceAfter = 0;
7468 for(size_t i = 0; i < MAX_COUNT; ++i)
7471 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
7473 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
7475 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
7477 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
7479 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
7482 bestFreeSpaceAfter = freeSpaceAfter;
7488 if(bestIndex != SIZE_MAX)
7490 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
7491 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
7493 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7496 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
7497 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
7498 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
7503 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
7513 static const size_t MAX_COUNT = 4;
7517 size_t blockInfoIndex;
7518 VkDeviceSize offset;
7520 } m_FreeSpaces[MAX_COUNT];
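// Usage sketch (not part of VMA) for FreeSpaceDatabase above: Register()
// remembers up to MAX_COUNT of the largest interesting holes left behind by
// moved allocations, and Fetch() hands out the hole that keeps the most space
// after the new allocation (so the remainder stays useful), shrinking or
// retiring the chosen entry in place. Hypothetical call sequence:
//
//   FreeSpaceDatabase db;
//   db.Register(/*blockInfoIndex*/ 0, /*offset*/ 256, /*size*/ 1024);
//   size_t dstBlock; VkDeviceSize dstOffset;
//   if(db.Fetch(/*alignment*/ 64, /*size*/ 512, dstBlock, dstOffset))
//   {
//       // move a 512-byte allocation to (dstBlock, dstOffset)
//   }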
7523 const bool m_OverlappingMoveSupported;
7525 uint32_t m_AllocationCount;
7526 bool m_AllAllocations;
7528 VkDeviceSize m_BytesMoved;
7529 uint32_t m_AllocationsMoved;
7531 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
7533 void PreprocessMetadata();
7534 void PostprocessMetadata();
7535 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
7538 struct VmaBlockDefragmentationContext
7542 BLOCK_FLAG_USED = 0x00000001,
7548 class VmaBlockVectorDefragmentationContext
7550 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
7554 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
7555 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
7556 uint32_t defragmentationMovesProcessed;
7557 uint32_t defragmentationMovesCommitted;
7558 bool hasDefragmentationPlan;
7560 VmaBlockVectorDefragmentationContext(
7563 VmaBlockVector* pBlockVector,
7564 uint32_t currFrameIndex);
7565 ~VmaBlockVectorDefragmentationContext();
7567 VmaPool GetCustomPool() const { return m_hCustomPool; }
7568 VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
7569 VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }
7571 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
7572 void AddAll() { m_AllAllocations = true; }
7581 VmaBlockVector* const m_pBlockVector;
7582 const uint32_t m_CurrFrameIndex;
7584 VmaDefragmentationAlgorithm* m_pAlgorithm;
7592 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
7593 bool m_AllAllocations;
7596 struct VmaDefragmentationContext_T
7599 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
7601 VmaDefragmentationContext_T(
7603 uint32_t currFrameIndex,
7606 ~VmaDefragmentationContext_T();
7608 void AddPools(uint32_t poolCount, const VmaPool* pPools);
7609 void AddAllocations(
7610 uint32_t allocationCount,
7612 VkBool32* pAllocationsChanged);
7620 VkResult Defragment(
7621 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
7622 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
7626 VkResult DefragmentPassEnd();
7630 const uint32_t m_CurrFrameIndex;
7631 const uint32_t m_Flags;
7634 VkDeviceSize m_MaxCpuBytesToMove;
7635 uint32_t m_MaxCpuAllocationsToMove;
7636 VkDeviceSize m_MaxGpuBytesToMove;
7637 uint32_t m_MaxGpuAllocationsToMove;
7640 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
7642 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
7645 #if VMA_RECORDING_ENABLED
7652 void WriteConfiguration(
7653 const VkPhysicalDeviceProperties& devProps,
7654 const VkPhysicalDeviceMemoryProperties& memProps,
7655 uint32_t vulkanApiVersion,
7656 bool dedicatedAllocationExtensionEnabled,
7657 bool bindMemory2ExtensionEnabled,
7658 bool memoryBudgetExtensionEnabled,
7659 bool deviceCoherentMemoryExtensionEnabled);
7662 void RecordCreateAllocator(uint32_t frameIndex);
7663 void RecordDestroyAllocator(uint32_t frameIndex);
7664 void RecordCreatePool(uint32_t frameIndex,
7667 void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
7668 void RecordAllocateMemory(uint32_t frameIndex,
7669 const VkMemoryRequirements& vkMemReq,
7672 void RecordAllocateMemoryPages(uint32_t frameIndex,
7673 const VkMemoryRequirements& vkMemReq,
7675 uint64_t allocationCount,
7677 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
7678 const VkMemoryRequirements& vkMemReq,
7679 bool requiresDedicatedAllocation,
7680 bool prefersDedicatedAllocation,
7683 void RecordAllocateMemoryForImage(uint32_t frameIndex,
7684 const VkMemoryRequirements& vkMemReq,
7685 bool requiresDedicatedAllocation,
7686 bool prefersDedicatedAllocation,
7689 void RecordFreeMemory(uint32_t frameIndex,
7691 void RecordFreeMemoryPages(uint32_t frameIndex,
7692 uint64_t allocationCount,
7694 void RecordSetAllocationUserData(uint32_t frameIndex,
7696 const void* pUserData);
7697 void RecordCreateLostAllocation(uint32_t frameIndex,
7699 void RecordMapMemory(uint32_t frameIndex,
7701 void RecordUnmapMemory(uint32_t frameIndex,
7703 void RecordFlushAllocation(uint32_t frameIndex,
7704 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
7705 void RecordInvalidateAllocation(uint32_t frameIndex,
7706 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
7707 void RecordCreateBuffer(uint32_t frameIndex,
7708 const VkBufferCreateInfo& bufCreateInfo,
7711 void RecordCreateImage(uint32_t frameIndex,
7712 const VkImageCreateInfo& imageCreateInfo,
7715 void RecordDestroyBuffer(uint32_t frameIndex,
7717 void RecordDestroyImage(uint32_t frameIndex,
7719 void RecordTouchAllocation(uint32_t frameIndex,
7721 void RecordGetAllocationInfo(uint32_t frameIndex,
7723 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
7725 void RecordDefragmentationBegin(uint32_t frameIndex,
7728 void RecordDefragmentationEnd(uint32_t frameIndex,
7730 void RecordSetPoolName(uint32_t frameIndex,
7741 class UserDataString
7745 const char* GetString() const { return m_Str; }
7755 VMA_MUTEX m_FileMutex;
7756 std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;
7758 void GetBasicParams(CallParams& outParams);
7761 template<typename T>
7762 void PrintPointerList(uint64_t count, const T* pItems)
7766 fprintf(m_File, "%p", pItems[0]);
7767 for(uint64_t i = 1; i < count; ++i)
7769 fprintf(m_File, " %p", pItems[i]);
7774 void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
7778 #endif // #if VMA_RECORDING_ENABLED
7783 class VmaAllocationObjectAllocator
7785 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
7787 VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);
7789 template<typename... Types> VmaAllocation Allocate(Types... args);
7794 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
7797 struct VmaCurrentBudgetData
7799 VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
7800 VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
7802 #if VMA_MEMORY_BUDGET
7803 VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
7804 VMA_RW_MUTEX m_BudgetMutex;
7805 uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
7806 uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
7807 uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
7808 #endif // #if VMA_MEMORY_BUDGET
7810 VmaCurrentBudgetData()
7812 for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
7814 m_BlockBytes[heapIndex] = 0;
7815 m_AllocationBytes[heapIndex] = 0;
7816 #if VMA_MEMORY_BUDGET
7817 m_VulkanUsage[heapIndex] = 0;
7818 m_VulkanBudget[heapIndex] = 0;
7819 m_BlockBytesAtBudgetFetch[heapIndex] = 0;
7823 #if VMA_MEMORY_BUDGET
7824 m_OperationsSinceBudgetFetch = 0;
7828 void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
7830 m_AllocationBytes[heapIndex] += allocationSize;
7831 #if VMA_MEMORY_BUDGET
7832 ++m_OperationsSinceBudgetFetch;
7836 void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
7838 VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
7839 m_AllocationBytes[heapIndex] -= allocationSize;
7840 #if VMA_MEMORY_BUDGET
7841 ++m_OperationsSinceBudgetFetch;
7847 struct VmaAllocator_T
7849 VMA_CLASS_NO_COPY(VmaAllocator_T)
7852 uint32_t m_VulkanApiVersion;
7853 bool m_UseKhrDedicatedAllocation;
7854 bool m_UseKhrBindMemory2;
7855 bool m_UseExtMemoryBudget;
7856 bool m_UseAmdDeviceCoherentMemory;
7857 bool m_UseKhrBufferDeviceAddress;
7859 VkInstance m_hInstance;
7860 bool m_AllocationCallbacksSpecified;
7861 VkAllocationCallbacks m_AllocationCallbacks;
7863 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
7866 uint32_t m_HeapSizeLimitMask;
7868 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
7869 VkPhysicalDeviceMemoryProperties m_MemProps;
7872 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
7875 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
7876 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
7877 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
7879 VmaCurrentBudgetData m_Budget;
7885 const VkAllocationCallbacks* GetAllocationCallbacks() const
7887 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
const VmaVulkanFunctions& GetVulkanFunctions() const
7891 return m_VulkanFunctions;
7894 VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; }
7896 VkDeviceSize GetBufferImageGranularity() const
7899 return VMA_MAX(static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
7900 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
7903 uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
7904 uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }
7906 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
7908 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
7909 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
7912 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
7914 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
7915 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7918 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
7920 return IsMemoryTypeNonCoherent(memTypeIndex) ?
7921 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
7922 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
7925 bool IsIntegratedGpu() const
7927 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
7930 uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }
7932 #if VMA_RECORDING_ENABLED
7933 VmaRecorder* GetRecorder() const { return m_pRecorder; }
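// Illustrative sketch (not part of VMA) of why GetMemoryTypeMinAlignment()
// above returns nonCoherentAtomSize for HOST_VISIBLE but not HOST_COHERENT
// memory: flush/invalidate ranges must be aligned to that atom, so placing
// allocations on atom boundaries keeps neighbouring allocations out of each
// other's flushed ranges. A simplified range-rounding helper:
static inline void VmaAlignFlushRange_Sketch(
    VkDeviceSize atom,                         // nonCoherentAtomSize
    VkDeviceSize offset, VkDeviceSize size,
    VkDeviceSize& outOffset, VkDeviceSize& outSize)
{
    outOffset = offset / atom * atom;                            // round begin down
    const VkDeviceSize end = (offset + size + atom - 1) / atom * atom; // round end up
    outSize = end - outOffset;
}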
7936 void GetBufferMemoryRequirements(
7938 VkMemoryRequirements& memReq,
7939 bool& requiresDedicatedAllocation,
7940 bool& prefersDedicatedAllocation) const;
7941 void GetImageMemoryRequirements(
7943 VkMemoryRequirements& memReq,
7944 bool& requiresDedicatedAllocation,
7945 bool& prefersDedicatedAllocation) const;
7948 VkResult AllocateMemory(
7949 const VkMemoryRequirements& vkMemReq,
7950 bool requiresDedicatedAllocation,
7951 bool prefersDedicatedAllocation,
7952 VkBuffer dedicatedBuffer,
7953 VkBufferUsageFlags dedicatedBufferUsage,
7954 VkImage dedicatedImage,
7956 VmaSuballocationType suballocType,
7957 size_t allocationCount,
7962 size_t allocationCount,
7965 VkResult ResizeAllocation(
7967 VkDeviceSize newSize);
7969 void CalculateStats(VmaStats* pStats);
7972 VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);
7974 #if VMA_STATS_STRING_ENABLED
7975 void PrintDetailedMap(
class VmaJsonWriter& json);
7978 VkResult DefragmentationBegin(
7982 VkResult DefragmentationEnd(
7985 VkResult DefragmentationPassBegin(
7988 VkResult DefragmentationPassEnd(
7995 void DestroyPool(VmaPool pool);
7998 void SetCurrentFrameIndex(uint32_t frameIndex);
7999 uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }
8001 void MakePoolAllocationsLost(
8003 size_t* pLostAllocationCount);
8004 VkResult CheckPoolCorruption(VmaPool hPool);
8005 VkResult CheckCorruption(uint32_t memoryTypeBits);
8010 VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
8012 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
8014 VkResult BindVulkanBuffer(
8015 VkDeviceMemory memory,
8016 VkDeviceSize memoryOffset,
8020 VkResult BindVulkanImage(
8021 VkDeviceMemory memory,
8022 VkDeviceSize memoryOffset,
8029 VkResult BindBufferMemory(
8031 VkDeviceSize allocationLocalOffset,
8034 VkResult BindImageMemory(
8036 VkDeviceSize allocationLocalOffset,
8040 VkResult FlushOrInvalidateAllocation(
8042 VkDeviceSize offset, VkDeviceSize size,
8043 VMA_CACHE_OPERATION op);
8044 VkResult FlushOrInvalidateAllocations(
8045 uint32_t allocationCount,
8047 const VkDeviceSize* offsets, const VkDeviceSize* sizes,
8048 VMA_CACHE_OPERATION op);
8050 void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);
8056 uint32_t GetGpuDefragmentationMemoryTypeBits();
8059 VkDeviceSize m_PreferredLargeHeapBlockSize;
8061 VkPhysicalDevice m_PhysicalDevice;
8062 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
8063 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
8065 VMA_RW_MUTEX m_PoolsMutex;
8067 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
8068 uint32_t m_NextPoolId;
8073 uint32_t m_GlobalMemoryTypeBits;
8075 #if VMA_RECORDING_ENABLED
8076 VmaRecorder* m_pRecorder;
8081 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
8082 void ImportVulkanFunctions_Static();
8087 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
8088 void ImportVulkanFunctions_Dynamic();
8091 void ValidateVulkanFunctions();
8093 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
8095 VkResult AllocateMemoryOfType(
8097 VkDeviceSize alignment,
8098 bool dedicatedAllocation,
8099 VkBuffer dedicatedBuffer,
8100 VkBufferUsageFlags dedicatedBufferUsage,
8101 VkImage dedicatedImage,
8103 uint32_t memTypeIndex,
8104 VmaSuballocationType suballocType,
8105 size_t allocationCount,
8109 VkResult AllocateDedicatedMemoryPage(
8111 VmaSuballocationType suballocType,
8112 uint32_t memTypeIndex,
8113 const VkMemoryAllocateInfo& allocInfo,
8115 bool isUserDataString,
8120 VkResult AllocateDedicatedMemory(
8122 VmaSuballocationType suballocType,
8123 uint32_t memTypeIndex,
8126 bool isUserDataString,
8128 VkBuffer dedicatedBuffer,
8129 VkBufferUsageFlags dedicatedBufferUsage,
8130 VkImage dedicatedImage,
8131 size_t allocationCount,
8140 uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;
8142 uint32_t CalculateGlobalMemoryTypeBits() const;
8144 bool GetFlushOrInvalidateRange(
8146 VkDeviceSize offset, VkDeviceSize size,
8147 VkMappedMemoryRange& outRange) const;
8149 #if VMA_MEMORY_BUDGET
8150 void UpdateVulkanBudget();
8151 #endif // #if VMA_MEMORY_BUDGET
8157 static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
8159 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
8162 static void VmaFree(VmaAllocator hAllocator, void* ptr)
8164 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
8167 template<typename T> static T* VmaAllocate(VmaAllocator hAllocator)
8170 return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
8173 template<typename T>
8174 static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
8176 return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
8179 template<typename T>
8180 static void vma_delete(VmaAllocator hAllocator, T* ptr)
ptr->~T();
8185 VmaFree(hAllocator, ptr);
8189 template<typename T>
8190 static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
8194 for(size_t i = count; i--; )
ptr[i].~T();
8196 VmaFree(hAllocator, ptr);
8203 #if VMA_STATS_STRING_ENABLED
8205 class VmaStringBuilder
8208 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
8209 size_t GetLength() const { return m_Data.size(); }
8210 const char* GetData() const { return m_Data.data(); }
8212 void Add(char ch) { m_Data.push_back(ch); }
8213 void Add(const char* pStr);
8214 void AddNewLine() { Add('\n'); }
8215 void AddNumber(uint32_t num);
8216 void AddNumber(uint64_t num);
8217 void AddPointer(const void* ptr);
8220 VmaVector< char, VmaStlAllocator<char> > m_Data;
8223 void VmaStringBuilder::Add(const char* pStr)
8225 const size_t strLen = strlen(pStr);
8228 const size_t oldCount = m_Data.size();
8229 m_Data.resize(oldCount + strLen);
8230 memcpy(m_Data.data() + oldCount, pStr, strLen);
8234 void VmaStringBuilder::AddNumber(uint32_t num)
8241 *--p = '0' + (num % 10);
8248 void VmaStringBuilder::AddNumber(uint64_t num)
8255 *--p = '0' + (num % 10);
8262 void VmaStringBuilder::AddPointer(const void* ptr)
8265 VmaPtrToStr(buf, sizeof(buf), ptr);
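// Illustrative sketch (not part of VMA) of the pattern used by AddNumber()
// above: decimal digits are produced least-significant first into the end of a
// scratch buffer ("*--p = '0' + (num % 10)"), then the populated tail of the
// buffer is appended in order.
static void VmaAddNumber_Sketch(VmaStringBuilder& sb, uint64_t num)
{
    char buf[21];                 // 20 digits suffice for any uint64_t
    char* p = buf + sizeof(buf);  // build backwards from the end
    do
    {
        *--p = '0' + (char)(num % 10);
        num /= 10;
    } while(num > 0);
    while(p != buf + sizeof(buf))
        sb.Add(*p++);             // emit digits most-significant first
}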
8269 #endif // #if VMA_STATS_STRING_ENABLED
8274 #if VMA_STATS_STRING_ENABLED
8278 VMA_CLASS_NO_COPY(VmaJsonWriter)
8280 VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
8283 void BeginObject(bool singleLine = false);
8286 void BeginArray(bool singleLine = false);
8289 void WriteString(const char* pStr);
8290 void BeginString(const char* pStr = VMA_NULL);
8291 void ContinueString(const char* pStr);
8292 void ContinueString(uint32_t n);
8293 void ContinueString(uint64_t n);
8294 void ContinueString_Pointer(const void* ptr);
8295 void EndString(const char* pStr = VMA_NULL);
8297 void WriteNumber(uint32_t n);
8298 void WriteNumber(uint64_t n);
8299 void WriteBool(bool b);
8303 static const char* const INDENT;
8305 enum COLLECTION_TYPE
8307 COLLECTION_TYPE_OBJECT,
8308 COLLECTION_TYPE_ARRAY,
8312 COLLECTION_TYPE type;
8313 uint32_t valueCount;
8314 bool singleLineMode;
8317 VmaStringBuilder& m_SB;
8318 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
8319 bool m_InsideString;
8321 void BeginValue(bool isString);
8322 void WriteIndent(bool oneLess = false);
8325 const char* const VmaJsonWriter::INDENT = "  ";
8327 VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
m_SB(sb),
8329 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
8330 m_InsideString(false)
8334 VmaJsonWriter::~VmaJsonWriter()
8336 VMA_ASSERT(!m_InsideString);
8337 VMA_ASSERT(m_Stack.empty());
8340 void VmaJsonWriter::BeginObject(bool singleLine)
8342 VMA_ASSERT(!m_InsideString);
StackItem item;
8348 item.type = COLLECTION_TYPE_OBJECT;
8349 item.valueCount = 0;
8350 item.singleLineMode = singleLine;
8351 m_Stack.push_back(item);
8354 void VmaJsonWriter::EndObject()
8356 VMA_ASSERT(!m_InsideString);
8361 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
8365 void VmaJsonWriter::BeginArray(bool singleLine)
8367 VMA_ASSERT(!m_InsideString);
StackItem item;
8373 item.type = COLLECTION_TYPE_ARRAY;
8374 item.valueCount = 0;
8375 item.singleLineMode = singleLine;
8376 m_Stack.push_back(item);
8379 void VmaJsonWriter::EndArray()
8381 VMA_ASSERT(!m_InsideString);
8386 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
8390 void VmaJsonWriter::WriteString(const char* pStr)
8396 void VmaJsonWriter::BeginString(const char* pStr)
8398 VMA_ASSERT(!m_InsideString);
8402 m_InsideString = true;
8403 if(pStr != VMA_NULL && pStr[0] != '\0')
8405 ContinueString(pStr);
8409 void VmaJsonWriter::ContinueString(const char* pStr)
8411 VMA_ASSERT(m_InsideString);
8413 const size_t strLen = strlen(pStr);
8414 for(size_t i = 0; i < strLen; ++i)
8447 VMA_ASSERT(0 && "Character not currently supported.");
8453 void VmaJsonWriter::ContinueString(uint32_t n)
8455 VMA_ASSERT(m_InsideString);
8459 void VmaJsonWriter::ContinueString(uint64_t n)
8461 VMA_ASSERT(m_InsideString);
8465 void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
8467 VMA_ASSERT(m_InsideString);
8468 m_SB.AddPointer(ptr);
8471 void VmaJsonWriter::EndString(const char* pStr)
8473 VMA_ASSERT(m_InsideString);
8474 if(pStr != VMA_NULL && pStr[0] != '\0')
8476 ContinueString(pStr);
8479 m_InsideString = false;
8482 void VmaJsonWriter::WriteNumber(uint32_t n)
8484 VMA_ASSERT(!m_InsideString);
8489 void VmaJsonWriter::WriteNumber(uint64_t n)
8491 VMA_ASSERT(!m_InsideString);
8496 void VmaJsonWriter::WriteBool(bool b)
8498 VMA_ASSERT(!m_InsideString);
8500 m_SB.Add(b ? "true" : "false");
8503 void VmaJsonWriter::WriteNull()
8505 VMA_ASSERT(!m_InsideString);
8510 void VmaJsonWriter::BeginValue(bool isString)
8512 if(!m_Stack.empty())
8514 StackItem& currItem = m_Stack.back();
8515 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8516 currItem.valueCount % 2 == 0)
8518 VMA_ASSERT(isString);
8521 if(currItem.type == COLLECTION_TYPE_OBJECT &&
8522 currItem.valueCount % 2 != 0)
8526 else if(currItem.valueCount > 0)
8535 ++currItem.valueCount;
8539 void VmaJsonWriter::WriteIndent(bool oneLess)
8541 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
8545 size_t count = m_Stack.size();
8546 if(count > 0 && oneLess)
8550 for(size_t i = 0; i < count; ++i)
8557 #endif // #if VMA_STATS_STRING_ENABLED
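// Usage sketch (not part of VMA): VmaJsonWriter is a push-style writer - inside
// an object, WriteString() calls alternate between keys and values, and every
// Begin* must be balanced by its End* (the destructor asserts the stack is
// empty). A hypothetical call sequence:
//
//   VmaStringBuilder sb(allocator);
//   {
//       VmaJsonWriter json(allocationCallbacks, sb);
//       json.BeginObject();
//       json.WriteString("TotalBytes");    // key
//       json.WriteNumber((uint64_t)1024);  // value
//       json.EndObject();
//   }
//   // sb.GetData() now holds output of the form {"TotalBytes":1024}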
8561 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
8563 if(IsUserDataString())
8565 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
8567 FreeUserDataString(hAllocator);
8569 if(pUserData != VMA_NULL)
8571 m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
8576 m_pUserData = pUserData;
8580 void VmaAllocation_T::ChangeBlockAllocation(
VmaAllocator hAllocator,
8582 VmaDeviceMemoryBlock* block,
8583 VkDeviceSize offset)
8585 VMA_ASSERT(block != VMA_NULL);
8586 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8589 if(block != m_BlockAllocation.m_Block)
8591 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
8592 if(IsPersistentMap())
8594 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
8595 block->Map(hAllocator, mapRefCount, VMA_NULL);
8598 m_BlockAllocation.m_Block = block;
8599 m_BlockAllocation.m_Offset = offset;
8602 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
8604 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
8605 m_BlockAllocation.m_Offset = newOffset;
8608 VkDeviceSize VmaAllocation_T::GetOffset() const
8612 case ALLOCATION_TYPE_BLOCK:
8613 return m_BlockAllocation.m_Offset;
8614 case ALLOCATION_TYPE_DEDICATED:
8622 VkDeviceMemory VmaAllocation_T::GetMemory() const
8626 case ALLOCATION_TYPE_BLOCK:
8627 return m_BlockAllocation.m_Block->GetDeviceMemory();
8628 case ALLOCATION_TYPE_DEDICATED:
8629 return m_DedicatedAllocation.m_hMemory;
8632 return VK_NULL_HANDLE;
8636 void* VmaAllocation_T::GetMappedData() const
8640 case ALLOCATION_TYPE_BLOCK:
8643 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
8644 VMA_ASSERT(pBlockData != VMA_NULL);
8645 return (char*)pBlockData + m_BlockAllocation.m_Offset;
8652 case ALLOCATION_TYPE_DEDICATED:
8653 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
8654 return m_DedicatedAllocation.m_pMappedData;
8661 bool VmaAllocation_T::CanBecomeLost() const
8665 case ALLOCATION_TYPE_BLOCK:
8666 return m_BlockAllocation.m_CanBecomeLost;
8667 case ALLOCATION_TYPE_DEDICATED:
8675 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8677 VMA_ASSERT(CanBecomeLost());
8683 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
8686 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8691 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
8697 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
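// Illustrative sketch (not part of VMA) of the compare-exchange pattern used by
// MakeLost() above (VMA_ATOMIC_UINT32 is VMA's alias for an atomic uint32_t):
// flipping the last-use frame index to VMA_FRAME_INDEX_LOST must race correctly
// against a concurrent touch that bumps the index, so the swap succeeds only if
// nobody changed the value in between; the real code re-reads and retries on
// failure, re-checking the frame-in-use window each time.
static inline bool VmaTryMakeLost_Sketch(
    VMA_ATOMIC_UINT32& lastUseFrameIndex, uint32_t observedFrameIndex)
{
    // Succeeds only if lastUseFrameIndex still equals observedFrameIndex.
    return lastUseFrameIndex.compare_exchange_weak(
        observedFrameIndex, VMA_FRAME_INDEX_LOST);
}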
8707 #if VMA_STATS_STRING_ENABLED
8710 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
8719 void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
8721 json.WriteString("Type");
8722 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
8724 json.WriteString("Size");
8725 json.WriteNumber(m_Size);
8727 if(m_pUserData != VMA_NULL)
8729 json.WriteString("UserData");
8730 if(IsUserDataString())
8732 json.WriteString((const char*)m_pUserData);
8737 json.ContinueString_Pointer(m_pUserData);
8742 json.WriteString("CreationFrameIndex");
8743 json.WriteNumber(m_CreationFrameIndex);
8745 json.WriteString("LastUseFrameIndex");
8746 json.WriteNumber(GetLastUseFrameIndex());
8748 if(m_BufferImageUsage != 0)
8750 json.WriteString("Usage");
8751 json.WriteNumber(m_BufferImageUsage);
8757 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
8759 VMA_ASSERT(IsUserDataString());
8760 VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
8761 m_pUserData = VMA_NULL;
8764 void VmaAllocation_T::BlockAllocMap()
8766 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
8768 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
8774 VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
8778 void VmaAllocation_T::BlockAllocUnmap()
8780 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
8782 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
8788 VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
8792 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
8794 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
8798 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
8800 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
8801 *ppData = m_DedicatedAllocation.m_pMappedData;
8807 VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
8808 return VK_ERROR_MEMORY_MAP_FAILED;
8813 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
8814 hAllocator->m_hDevice,
8815 m_DedicatedAllocation.m_hMemory,
8820 if(result == VK_SUCCESS)
8822 m_DedicatedAllocation.m_pMappedData = *ppData;
8829 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
8831 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
8833 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
8838 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
8839 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
8840 hAllocator->m_hDevice,
8841 m_DedicatedAllocation.m_hMemory);
8846 VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
8850 #if VMA_STATS_STRING_ENABLED
8852 static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
8856 json.WriteString("Blocks");
8859 json.WriteString("Allocations");
8862 json.WriteString("UnusedRanges");
8865 json.WriteString("UsedBytes");
8868 json.WriteString("UnusedBytes");
8873 json.WriteString("AllocationSize");
8874 json.BeginObject(true);
8875 json.WriteString("Min");
8877 json.WriteString("Avg");
8879 json.WriteString("Max");
8886 json.WriteString("UnusedRangeSize");
8887 json.BeginObject(true);
8888 json.WriteString("Min");
8890 json.WriteString("Avg");
8892 json.WriteString("Max");
8900 #endif // #if VMA_STATS_STRING_ENABLED
8902 struct VmaSuballocationItemSizeLess
8905 bool operator()(const VmaSuballocationList::iterator lhs, const VmaSuballocationList::iterator rhs) const
8908 return lhs->size < rhs->size;
bool operator()(const VmaSuballocationList::iterator lhs, VkDeviceSize rhsSize) const
8914 return lhs->size < rhsSize;
8922 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
8924 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
8928 #if VMA_STATS_STRING_ENABLED
8930 void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
8931 VkDeviceSize unusedBytes,
8932 size_t allocationCount,
8933 size_t unusedRangeCount) const
8937 json.WriteString("TotalBytes");
8938 json.WriteNumber(GetSize());
8940 json.WriteString("UnusedBytes");
8941 json.WriteNumber(unusedBytes);
8943 json.WriteString("Allocations");
8944 json.WriteNumber((uint64_t)allocationCount);
8946 json.WriteString("UnusedRanges");
8947 json.WriteNumber((uint64_t)unusedRangeCount);
8949 json.WriteString("Suballocations");
8953 void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
8954 VkDeviceSize offset,
VmaAllocation hAllocation) const
8957 json.BeginObject(true);
8959 json.WriteString("Offset");
8960 json.WriteNumber(offset);
8962 hAllocation->PrintParameters(json);
8967 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
8968 VkDeviceSize offset,
8969 VkDeviceSize size) const
8971 json.BeginObject(true);
8973 json.WriteString("Offset");
8974 json.WriteNumber(offset);
8976 json.WriteString("Type");
8977 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
8979 json.WriteString("Size");
8980 json.WriteNumber(size);
8985 void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
8991 #endif // #if VMA_STATS_STRING_ENABLED
8996 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
8997 VmaBlockMetadata(hAllocator),
9000 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9001 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
9005 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
9009 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
9011 VmaBlockMetadata::Init(size);
9014 m_SumFreeSize = size;
9016 VmaSuballocation suballoc = {};
9017 suballoc.offset = 0;
9018 suballoc.size = size;
9019 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9020 suballoc.hAllocation = VK_NULL_HANDLE;
9022 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9023 m_Suballocations.push_back(suballoc);
9024 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
9026 m_FreeSuballocationsBySize.push_back(suballocItem);
9029 bool VmaBlockMetadata_Generic::Validate() const
9031 VMA_VALIDATE(!m_Suballocations.empty());
9034 VkDeviceSize calculatedOffset = 0;
9036 uint32_t calculatedFreeCount = 0;
9038 VkDeviceSize calculatedSumFreeSize = 0;
9041 size_t freeSuballocationsToRegister = 0;
9043 bool prevFree = false;
9045 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9046 suballocItem != m_Suballocations.cend();
9049 const VmaSuballocation& subAlloc = *suballocItem;
9052 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
9054 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
9056 VMA_VALIDATE(!prevFree || !currFree);
9058 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
9062 calculatedSumFreeSize += subAlloc.size;
9063 ++calculatedFreeCount;
9064 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9066 ++freeSuballocationsToRegister;
9070 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
9074 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
9075 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
9078 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
9081 calculatedOffset += subAlloc.size;
9082 prevFree = currFree;
9087 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
9089 VkDeviceSize lastSize = 0;
9090 for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
9092 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
9095 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
9097 VMA_VALIDATE(suballocItem->size >= lastSize);
9099 lastSize = suballocItem->size;
9103 VMA_VALIDATE(ValidateFreeSuballocationList());
9104 VMA_VALIDATE(calculatedOffset == GetSize());
9105 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
9106 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
9111 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
9113 if(!m_FreeSuballocationsBySize.empty())
9115 return m_FreeSuballocationsBySize.back()->size;
9123 bool VmaBlockMetadata_Generic::IsEmpty() const
9125 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
9128 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
9132 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9144 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9145 suballocItem != m_Suballocations.cend();
9148 const VmaSuballocation& suballoc = *suballocItem;
9149 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9162 void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
9164 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
9166 inoutStats.size += GetSize();
9173 #if VMA_STATS_STRING_ENABLED
9175 void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
9177 PrintDetailedMap_Begin(json,
9179 m_Suballocations.size() - (size_t)m_FreeCount,
9183 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
9184 suballocItem != m_Suballocations.cend();
9185 ++suballocItem, ++i)
9187 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9189 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
9193 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
9197 PrintDetailedMap_End(json);
9200 #endif // #if VMA_STATS_STRING_ENABLED
9202 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
9203 uint32_t currentFrameIndex,
9204 uint32_t frameInUseCount,
9205 VkDeviceSize bufferImageGranularity,
9206 VkDeviceSize allocSize,
9207 VkDeviceSize allocAlignment,
9209 VmaSuballocationType allocType,
9210 bool canMakeOtherLost,
9212 VmaAllocationRequest* pAllocationRequest)
9214 VMA_ASSERT(allocSize > 0);
9215 VMA_ASSERT(!upperAddress);
9216 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9217 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9218 VMA_HEAVY_ASSERT(Validate());
9220 pAllocationRequest->type = VmaAllocationRequestType::Normal;
9223 if(canMakeOtherLost == false &&
9224 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
9230 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
9231 if(freeSuballocCount > 0)
9236 VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
9237 m_FreeSuballocationsBySize.data(),
9238 m_FreeSuballocationsBySize.data() + freeSuballocCount,
9239 allocSize + 2 * VMA_DEBUG_MARGIN,
9240 VmaSuballocationItemSizeLess());
9241 size_t index = it - m_FreeSuballocationsBySize.data();
9242 for(; index < freeSuballocCount; ++index)
9247 bufferImageGranularity,
9251 m_FreeSuballocationsBySize[index],
9253 &pAllocationRequest->offset,
9254 &pAllocationRequest->itemsToMakeLostCount,
9255 &pAllocationRequest->sumFreeSize,
9256 &pAllocationRequest->sumItemSize))
9258 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
9263 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
9265 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9266 it != m_Suballocations.end();
9269 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
9272 bufferImageGranularity,
9278 &pAllocationRequest->offset,
9279 &pAllocationRequest->itemsToMakeLostCount,
9280 &pAllocationRequest->sumFreeSize,
9281 &pAllocationRequest->sumItemSize))
9283 pAllocationRequest->item = it;
9291 for(size_t index = freeSuballocCount; index--; )
9296 bufferImageGranularity,
9300 m_FreeSuballocationsBySize[index],
9302 &pAllocationRequest->offset,
9303 &pAllocationRequest->itemsToMakeLostCount,
9304 &pAllocationRequest->sumFreeSize,
9305 &pAllocationRequest->sumItemSize))
9307 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
9314 if(canMakeOtherLost)
9319 VmaAllocationRequest tmpAllocRequest = {};
9320 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
9321 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
9322 suballocIt != m_Suballocations.end();
9325 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
9326 suballocIt->hAllocation->CanBecomeLost())
9331 bufferImageGranularity,
9337 &tmpAllocRequest.offset,
9338 &tmpAllocRequest.itemsToMakeLostCount,
9339 &tmpAllocRequest.sumFreeSize,
9340 &tmpAllocRequest.sumItemSize))
9344 *pAllocationRequest = tmpAllocRequest;
9345 pAllocationRequest->item = suballocIt;
9348 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
9350 *pAllocationRequest = tmpAllocRequest;
9351 pAllocationRequest->item = suballocIt;
9364 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
9365 uint32_t currentFrameIndex,
9366 uint32_t frameInUseCount,
9367 VmaAllocationRequest* pAllocationRequest)
9369 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
9371 while(pAllocationRequest->itemsToMakeLostCount > 0)
9373 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
9375 ++pAllocationRequest->item;
9377 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9378 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
9379 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
9380 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9382 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
9383 --pAllocationRequest->itemsToMakeLostCount;
9385 else
9387 return false;
9391 VMA_HEAVY_ASSERT(Validate());
9392 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
9393 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
9394 return true;
9398 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9400 uint32_t lostAllocationCount = 0;
9401 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9402 it != m_Suballocations.end();
9405 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
9406 it->hAllocation->CanBecomeLost() &&
9407 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9409 it = FreeSuballocation(it);
9410 ++lostAllocationCount;
9413 return lostAllocationCount;
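// Editorial sketch of the corruption-detection layout assumed by CheckCorruption below
// (VMA_DEBUG_MARGIN > 0, margins filled with VMA_CORRUPTION_DETECTION_MAGIC_VALUE):
//
//   | ... | magic bytes | allocation payload | magic bytes | ... |
//         ^ offset - VMA_DEBUG_MARGIN        ^ offset + size
//
// VmaValidateMagicValue() re-reads the marker bytes on both sides of every allocation;
// if either side was overwritten, the block reports VK_ERROR_VALIDATION_FAILED_EXT.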
9416 VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
9418 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9419 it != m_Suballocations.end();
9422 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
9424 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
9426 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9427 return VK_ERROR_VALIDATION_FAILED_EXT;
9429 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
9431 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9432 return VK_ERROR_VALIDATION_FAILED_EXT;
9437 return VK_SUCCESS;
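// Editorial note: Alloc (below) carves the requested range out of the free suballocation
// chosen by CreateAllocationRequest. Alignment and debug margins can leave gaps on either
// side, so the free item may be split into up to three pieces: paddingBegin (stays free and
// is re-registered), the allocation itself, and paddingEnd (also stays free). m_FreeCount
// and m_SumFreeSize are updated accordingly at the end of the function.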
9440 void VmaBlockMetadata_Generic::Alloc(
9441 const VmaAllocationRequest& request,
9442 VmaSuballocationType type,
9443 VkDeviceSize allocSize,
9444 VmaAllocation hAllocation)
9446 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
9447 VMA_ASSERT(request.item != m_Suballocations.end());
9448 VmaSuballocation& suballoc = *request.item;
9450 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9452 VMA_ASSERT(request.offset >= suballoc.offset);
9453 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
9454 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
9455 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
9459 UnregisterFreeSuballocation(request.item);
9461 suballoc.offset = request.offset;
9462 suballoc.size = allocSize;
9463 suballoc.type = type;
9464 suballoc.hAllocation = hAllocation;
9467 if(paddingEnd)
9469 VmaSuballocation paddingSuballoc = {};
9470 paddingSuballoc.offset = request.offset + allocSize;
9471 paddingSuballoc.size = paddingEnd;
9472 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9473 VmaSuballocationList::iterator next = request.item;
9474 ++next;
9475 const VmaSuballocationList::iterator paddingEndItem =
9476 m_Suballocations.insert(next, paddingSuballoc);
9477 RegisterFreeSuballocation(paddingEndItem);
9481 if(paddingBegin)
9483 VmaSuballocation paddingSuballoc = {};
9484 paddingSuballoc.offset = request.offset - paddingBegin;
9485 paddingSuballoc.size = paddingBegin;
9486 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9487 const VmaSuballocationList::iterator paddingBeginItem =
9488 m_Suballocations.insert(request.item, paddingSuballoc);
9489 RegisterFreeSuballocation(paddingBeginItem);
9493 m_FreeCount = m_FreeCount - 1;
9494 if(paddingBegin > 0)
9496 ++m_FreeCount;
9498 if(paddingEnd > 0)
9500 ++m_FreeCount;
9502 m_SumFreeSize -= allocSize;
9505 void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
9507 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9508 suballocItem != m_Suballocations.end();
9509 ++suballocItem)
9511 VmaSuballocation& suballoc = *suballocItem;
9512 if(suballoc.hAllocation == allocation)
9514 FreeSuballocation(suballocItem);
9515 VMA_HEAVY_ASSERT(Validate());
9516 return;
9519 VMA_ASSERT(0 && "Not found!");
9522 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
9524 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9525 suballocItem != m_Suballocations.end();
9526 ++suballocItem)
9528 VmaSuballocation& suballoc = *suballocItem;
9529 if(suballoc.offset == offset)
9531 FreeSuballocation(suballocItem);
9532 return;
9535 VMA_ASSERT(0 && "Not found!");
9538 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
9540 VkDeviceSize lastSize = 0;
9541 for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
9543 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
9545 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
9546 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9547 VMA_VALIDATE(it->size >= lastSize);
9548 lastSize = it->size;
9550 return true;
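// Editorial note: CheckAllocation (below) answers whether an allocation of
// allocSize/allocAlignment can start inside the free (or potentially freeable) range at
// suballocItem. It reserves VMA_DEBUG_MARGIN on both sides and honors
// bufferImageGranularity: if a neighboring suballocation of a conflicting type (linear
// buffer vs. optimal-tiling image) shares the same granularity "page"
// (VmaBlocksOnSamePage / VmaIsBufferImageGranularityConflict), *pOffset is bumped to the
// next granularity boundary. With canMakeOtherLost it also accumulates the cost of making
// following allocations lost via *itemsToMakeLostCount and *pSumItemSize.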
9553 bool VmaBlockMetadata_Generic::CheckAllocation(
9554 uint32_t currentFrameIndex,
9555 uint32_t frameInUseCount,
9556 VkDeviceSize bufferImageGranularity,
9557 VkDeviceSize allocSize,
9558 VkDeviceSize allocAlignment,
9559 VmaSuballocationType allocType,
9560 VmaSuballocationList::const_iterator suballocItem,
9561 bool canMakeOtherLost,
9562 VkDeviceSize* pOffset,
9563 size_t* itemsToMakeLostCount,
9564 VkDeviceSize* pSumFreeSize,
9565 VkDeviceSize* pSumItemSize) const
9567 VMA_ASSERT(allocSize > 0);
9568 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9569 VMA_ASSERT(suballocItem != m_Suballocations.cend());
9570 VMA_ASSERT(pOffset != VMA_NULL);
9572 *itemsToMakeLostCount = 0;
9573 *pSumFreeSize = 0;
9574 *pSumItemSize = 0;
9576 if(canMakeOtherLost)
9578 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9580 *pSumFreeSize = suballocItem->size;
9582 else
9584 if(suballocItem->hAllocation->CanBecomeLost() &&
9585 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9587 ++*itemsToMakeLostCount;
9588 *pSumItemSize = suballocItem->size;
9590 else
9592 return false;
9597 if(GetSize() - suballocItem->offset < allocSize)
9599 return false;
9603 *pOffset = suballocItem->offset;
9606 if(VMA_DEBUG_MARGIN > 0)
9608 *pOffset += VMA_DEBUG_MARGIN;
9612 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
9616 if(bufferImageGranularity > 1)
9618 bool bufferImageGranularityConflict = false;
9619 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
9620 while(prevSuballocItem != m_Suballocations.cbegin())
9622 --prevSuballocItem;
9623 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
9624 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
9626 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9628 bufferImageGranularityConflict = true;
9629 break;
9636 if(bufferImageGranularityConflict)
9638 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
9644 if(*pOffset >= suballocItem->offset + suballocItem->size)
9646 return false;
9650 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
9653 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
9655 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
9657 if(suballocItem->offset + totalSize > GetSize())
9659 return false;
9664 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
9665 if(totalSize > suballocItem->size)
9667 VkDeviceSize remainingSize = totalSize - suballocItem->size;
9668 while(remainingSize > 0)
9670 ++lastSuballocItem;
9671 if(lastSuballocItem == m_Suballocations.cend())
9673 return false;
9675 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9677 *pSumFreeSize += lastSuballocItem->size;
9679 else
9681 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
9682 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
9683 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9685 ++*itemsToMakeLostCount;
9686 *pSumItemSize += lastSuballocItem->size;
9688 else
9690 return false;
9693 remainingSize = (lastSuballocItem->size < remainingSize) ?
9694 remainingSize - lastSuballocItem->size : 0;
9700 if(bufferImageGranularity > 1)
9702 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
9703 ++nextSuballocItem;
9704 while(nextSuballocItem != m_Suballocations.cend())
9706 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
9707 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9709 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9711 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
9712 if(nextSuballoc.hAllocation->CanBecomeLost() &&
9713 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9715 ++*itemsToMakeLostCount;
9717 else
9719 return false;
9726 ++nextSuballocItem;
9731 else // !canMakeOtherLost
9734 const VmaSuballocation& suballoc = *suballocItem;
9735 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9737 *pSumFreeSize = suballoc.size;
9740 if(suballoc.size < allocSize)
9742 return false;
9746 *pOffset = suballoc.offset;
9749 if(VMA_DEBUG_MARGIN > 0)
9751 *pOffset += VMA_DEBUG_MARGIN;
9755 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
9759 if(bufferImageGranularity > 1)
9761 bool bufferImageGranularityConflict = false;
9762 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
9763 while(prevSuballocItem != m_Suballocations.cbegin())
9765 --prevSuballocItem;
9766 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
9767 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
9769 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9771 bufferImageGranularityConflict = true;
9772 break;
9779 if(bufferImageGranularityConflict)
9781 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
9786 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
9789 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
9792 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
9794 return false;
9799 if(bufferImageGranularity > 1)
9801 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
9802 ++nextSuballocItem;
9803 while(nextSuballocItem != m_Suballocations.cend())
9805 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
9806 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9808 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9810 return false;
9818 ++nextSuballocItem;
9824 return true; // All tests passed; *pOffset is already filled.
9827 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
9829 VMA_ASSERT(item != m_Suballocations.end());
9830 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9832 VmaSuballocationList::iterator nextItem = item;
9833 ++nextItem;
9834 VMA_ASSERT(nextItem != m_Suballocations.end());
9835 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
9837 item->size += nextItem->size;
9838 --m_FreeCount;
9839 m_Suballocations.erase(nextItem);
9842 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
9845 VmaSuballocation& suballoc = *suballocItem;
9846 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9847 suballoc.hAllocation = VK_NULL_HANDLE;
9850 ++m_FreeCount;
9851 m_SumFreeSize += suballoc.size;
9854 bool mergeWithNext = false;
9855 bool mergeWithPrev = false;
9857 VmaSuballocationList::iterator nextItem = suballocItem;
9858 ++nextItem;
9859 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
9861 mergeWithNext = true;
9864 VmaSuballocationList::iterator prevItem = suballocItem;
9865 if(suballocItem != m_Suballocations.begin())
9867 --prevItem;
9868 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9870 mergeWithPrev = true;
9874 if(mergeWithNext)
9876 UnregisterFreeSuballocation(nextItem);
9877 MergeFreeWithNext(suballocItem);
9880 if(mergeWithPrev)
9882 UnregisterFreeSuballocation(prevItem);
9883 MergeFreeWithNext(prevItem);
9884 RegisterFreeSuballocation(prevItem);
9885 return prevItem;
9887 else
9889 RegisterFreeSuballocation(suballocItem);
9890 return suballocItem;
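// Editorial note: m_FreeSuballocationsBySize stores iterators to free items sorted
// ascending by size; only items of at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER
// are tracked. Register/Unregister below maintain that ordering so BEST_FIT can use binary
// search. VmaBinaryFindFirstNotLess behaves like std::lower_bound: it returns the first
// element whose size is not less than the key, after which equal-sized entries are scanned
// linearly to locate the exact iterator.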
9894 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
9896 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9897 VMA_ASSERT(item->size > 0);
9901 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
9903 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9905 if(m_FreeSuballocationsBySize.empty())
9907 m_FreeSuballocationsBySize.push_back(item);
9911 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
9919 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
9921 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9922 VMA_ASSERT(item->size > 0);
9926 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
9928 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9930 VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
9931 m_FreeSuballocationsBySize.data(),
9932 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
9933 item,
9934 VmaSuballocationItemSizeLess());
9935 for(size_t index = it - m_FreeSuballocationsBySize.data();
9936 index < m_FreeSuballocationsBySize.size();
9937 ++index)
9939 if(m_FreeSuballocationsBySize[index] == item)
9941 VmaVectorRemove(m_FreeSuballocationsBySize, index);
9942 return;
9944 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
9946 VMA_ASSERT(0 && "Not found.");
9952 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
9953 VkDeviceSize bufferImageGranularity,
9954 VmaSuballocationType& inOutPrevSuballocType) const
9956 if(bufferImageGranularity == 1 || IsEmpty())
9958 return false;
9961 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
9962 bool typeConflictFound = false;
9963 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
9964 it != m_Suballocations.cend();
9965 ++it)
9967 const VmaSuballocationType suballocType = it->type;
9968 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
9970 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
9971 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
9973 typeConflictFound = true;
9975 inOutPrevSuballocType = suballocType;
9979 return typeConflictFound || minAlignment >= bufferImageGranularity;
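////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Linear
//
// Editorial overview (inferred from the implementation that follows):
// Suballocations live in two vectors instead of a list. "1st" grows toward higher
// addresses; "2nd" is used in one of three modes (m_2ndVectorMode):
// - SECOND_VECTOR_EMPTY: plain linear/stack allocator, everything is in 1st.
// - SECOND_VECTOR_RING_BUFFER: allocations wrap around and 2nd occupies the beginning of
//   the block, below the first used item of 1st.
// - SECOND_VECTOR_DOUBLE_STACK: upper-address allocations grow downward from the block end
//   in 2nd while 1st grows upward.
// m_1stVectorIndex selects which of m_Suballocations0/1 currently plays the "1st" role, so
// CleanupAfterFree can swap the roles without copying. Freed entries become "null items"
// (hAllocation == VK_NULL_HANDLE), stay in place, and are counted in
// m_1stNullItemsBeginCount, m_1stNullItemsMiddleCount and m_2ndNullItemsCount until
// cleanup compacts them away.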
9985 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
9986 VmaBlockMetadata(hAllocator),
9987 m_SumFreeSize(0),
9988 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9989 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9990 m_1stVectorIndex(0),
9991 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
9992 m_1stNullItemsBeginCount(0),
9993 m_1stNullItemsMiddleCount(0),
9994 m_2ndNullItemsCount(0)
9998 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
10002 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
10004 VmaBlockMetadata::Init(size);
10005 m_SumFreeSize = size;
10008 bool VmaBlockMetadata_Linear::Validate() const
10010 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10011 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10013 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
10014 VMA_VALIDATE(!suballocations1st.empty() ||
10015 suballocations2nd.empty() ||
10016 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
10018 if(!suballocations1st.empty())
10021 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
10023 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
10025 if(!suballocations2nd.empty())
10028 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
10031 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
10032 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
10034 VkDeviceSize sumUsedSize = 0;
10035 const size_t suballoc1stCount = suballocations1st.size();
10036 VkDeviceSize offset = VMA_DEBUG_MARGIN;
10038 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10040 const size_t suballoc2ndCount = suballocations2nd.size();
10041 size_t nullItem2ndCount = 0;
10042 for(size_t i = 0; i < suballoc2ndCount; ++i)
10044 const VmaSuballocation& suballoc = suballocations2nd[i];
10045 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10047 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10048 VMA_VALIDATE(suballoc.offset >= offset);
10052 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10053 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10054 sumUsedSize += suballoc.size;
10058 ++nullItem2ndCount;
10061 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10064 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
10067 for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
10069 const VmaSuballocation& suballoc = suballocations1st[i];
10070 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
10071 suballoc.hAllocation == VK_NULL_HANDLE);
10074 size_t nullItem1stCount = m_1stNullItemsBeginCount;
10076 for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
10078 const VmaSuballocation& suballoc = suballocations1st[i];
10079 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10081 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10082 VMA_VALIDATE(suballoc.offset >= offset);
10083 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
10087 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10088 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10089 sumUsedSize += suballoc.size;
10093 ++nullItem1stCount;
10096 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10098 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
10100 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10102 const size_t suballoc2ndCount = suballocations2nd.size();
10103 size_t nullItem2ndCount = 0;
10104 for(size_t i = suballoc2ndCount; i--; )
10106 const VmaSuballocation& suballoc = suballocations2nd[i];
10107 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
10109 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
10110 VMA_VALIDATE(suballoc.offset >= offset);
10114 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
10115 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
10116 sumUsedSize += suballoc.size;
10120 ++nullItem2ndCount;
10123 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
10126 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
10129 VMA_VALIDATE(offset <= GetSize());
10130 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
10132 return true;
10135 size_t VmaBlockMetadata_Linear::GetAllocationCount() const
10137 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
10138 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
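// Editorial note: GetUnusedRangeSizeMax only needs to look at the gaps between the packed
// vectors, because a linear allocator cannot reuse holes left by freed middle items:
// - SECOND_VECTOR_EMPTY: the space before the first and after the last item of 1st.
// - SECOND_VECTOR_RING_BUFFER: the gap between the end of 2nd (at the block start) and the
//   first used item of 1st.
// - SECOND_VECTOR_DOUBLE_STACK: the gap between the end of 1st and the bottom item of 2nd.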
10141 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
10143 const VkDeviceSize size = GetSize();
10149 if(IsEmpty())
10151 return size;
10155 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10157 switch(m_2ndVectorMode)
10159 case SECOND_VECTOR_EMPTY:
10165 const size_t suballocations1stCount = suballocations1st.size();
10166 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
10167 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10168 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
10169 return VMA_MAX(
10170 firstSuballoc.offset,
10171 size - (lastSuballoc.offset + lastSuballoc.size));
10175 case SECOND_VECTOR_RING_BUFFER:
10180 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10181 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
10182 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
10183 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
10187 case SECOND_VECTOR_DOUBLE_STACK:
10192 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10193 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
10194 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
10195 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
10205 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
10207 const VkDeviceSize size = GetSize();
10208 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10209 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10210 const size_t suballoc1stCount = suballocations1st.size();
10211 const size_t suballoc2ndCount = suballocations2nd.size();
10222 VkDeviceSize lastOffset = 0;
10224 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10226 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10227 size_t nextAlloc2ndIndex = 0;
10228 while(lastOffset < freeSpace2ndTo1stEnd)
10231 while(nextAlloc2ndIndex < suballoc2ndCount &&
10232 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10234 ++nextAlloc2ndIndex;
10238 if(nextAlloc2ndIndex < suballoc2ndCount)
10240 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10243 if(lastOffset < suballoc.offset)
10246 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10260 lastOffset = suballoc.offset + suballoc.size;
10261 ++nextAlloc2ndIndex;
10267 if(lastOffset < freeSpace2ndTo1stEnd)
10269 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10277 lastOffset = freeSpace2ndTo1stEnd;
10282 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10283 const VkDeviceSize freeSpace1stTo2ndEnd =
10284 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10285 while(lastOffset < freeSpace1stTo2ndEnd)
10288 while(nextAlloc1stIndex < suballoc1stCount &&
10289 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10291 ++nextAlloc1stIndex;
10295 if(nextAlloc1stIndex < suballoc1stCount)
10297 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10300 if(lastOffset < suballoc.offset)
10303 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10317 lastOffset = suballoc.offset + suballoc.size;
10318 ++nextAlloc1stIndex;
10324 if(lastOffset < freeSpace1stTo2ndEnd)
10326 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10334 lastOffset = freeSpace1stTo2ndEnd;
10338 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10340 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10341 while(lastOffset < size)
10344 while(nextAlloc2ndIndex != SIZE_MAX &&
10345 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10347 --nextAlloc2ndIndex;
10351 if(nextAlloc2ndIndex != SIZE_MAX)
10353 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10356 if(lastOffset < suballoc.offset)
10359 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10373 lastOffset = suballoc.offset + suballoc.size;
10374 --nextAlloc2ndIndex;
10380 if(lastOffset < size)
10382 const VkDeviceSize unusedRangeSize = size - lastOffset;
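// Editorial note: CalcAllocationStatInfo above, and AddPoolStats / PrintDetailedMap below,
// all walk the block in the same address order: first 2nd while it acts as a ring buffer
// (it occupies the lowest addresses), then 1st up to the block end or the bottom of a
// double stack, then 2nd backward while it acts as a double stack. Null items are skipped
// by the inner "while hAllocation == VK_NULL_HANDLE" loops, and any distance between
// lastOffset and the next allocation is reported as an unused range.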
10398 void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
10400 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10401 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10402 const VkDeviceSize size = GetSize();
10403 const size_t suballoc1stCount = suballocations1st.size();
10404 const size_t suballoc2ndCount = suballocations2nd.size();
10406 inoutStats.size += size;
10408 VkDeviceSize lastOffset = 0;
10410 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10412 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10413 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
10414 while(lastOffset < freeSpace2ndTo1stEnd)
10417 while(nextAlloc2ndIndex < suballoc2ndCount &&
10418 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10420 ++nextAlloc2ndIndex;
10424 if(nextAlloc2ndIndex < suballoc2ndCount)
10426 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10429 if(lastOffset < suballoc.offset)
10432 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10443 lastOffset = suballoc.offset + suballoc.size;
10444 ++nextAlloc2ndIndex;
10449 if(lastOffset < freeSpace2ndTo1stEnd)
10452 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10459 lastOffset = freeSpace2ndTo1stEnd;
10464 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10465 const VkDeviceSize freeSpace1stTo2ndEnd =
10466 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10467 while(lastOffset < freeSpace1stTo2ndEnd)
10470 while(nextAlloc1stIndex < suballoc1stCount &&
10471 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10473 ++nextAlloc1stIndex;
10477 if(nextAlloc1stIndex < suballoc1stCount)
10479 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10482 if(lastOffset < suballoc.offset)
10485 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10496 lastOffset = suballoc.offset + suballoc.size;
10497 ++nextAlloc1stIndex;
10502 if(lastOffset < freeSpace1stTo2ndEnd)
10505 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10512 lastOffset = freeSpace1stTo2ndEnd;
10516 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10518 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10519 while(lastOffset < size)
10522 while(nextAlloc2ndIndex != SIZE_MAX &&
10523 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10525 --nextAlloc2ndIndex;
10529 if(nextAlloc2ndIndex != SIZE_MAX)
10531 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10534 if(lastOffset < suballoc.offset)
10537 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10548 lastOffset = suballoc.offset + suballoc.size;
10549 --nextAlloc2ndIndex;
10554 if(lastOffset < size)
10557 const VkDeviceSize unusedRangeSize = size - lastOffset;
10570 #if VMA_STATS_STRING_ENABLED
10571 void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
10573 const VkDeviceSize size = GetSize();
10574 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10575 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10576 const size_t suballoc1stCount = suballocations1st.size();
10577 const size_t suballoc2ndCount = suballocations2nd.size();
10581 size_t unusedRangeCount = 0;
10582 VkDeviceSize usedBytes = 0;
10584 VkDeviceSize lastOffset = 0;
10586 size_t alloc2ndCount = 0;
10587 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10589 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10590 size_t nextAlloc2ndIndex = 0;
10591 while(lastOffset < freeSpace2ndTo1stEnd)
10594 while(nextAlloc2ndIndex < suballoc2ndCount &&
10595 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10597 ++nextAlloc2ndIndex;
10601 if(nextAlloc2ndIndex < suballoc2ndCount)
10603 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10606 if(lastOffset < suballoc.offset)
10609 ++unusedRangeCount;
10614 ++alloc2ndCount;
10615 usedBytes += suballoc.size;
10618 lastOffset = suballoc.offset + suballoc.size;
10619 ++nextAlloc2ndIndex;
10624 if(lastOffset < freeSpace2ndTo1stEnd)
10627 ++unusedRangeCount;
10631 lastOffset = freeSpace2ndTo1stEnd;
10636 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10637 size_t alloc1stCount = 0;
10638 const VkDeviceSize freeSpace1stTo2ndEnd =
10639 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10640 while(lastOffset < freeSpace1stTo2ndEnd)
10643 while(nextAlloc1stIndex < suballoc1stCount &&
10644 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10646 ++nextAlloc1stIndex;
10650 if(nextAlloc1stIndex < suballoc1stCount)
10652 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10655 if(lastOffset < suballoc.offset)
10658 ++unusedRangeCount;
10663 ++alloc1stCount;
10664 usedBytes += suballoc.size;
10667 lastOffset = suballoc.offset + suballoc.size;
10668 ++nextAlloc1stIndex;
10673 if(lastOffset < size)
10676 ++unusedRangeCount;
10680 lastOffset = freeSpace1stTo2ndEnd;
10684 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10686 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10687 while(lastOffset < size)
10690 while(nextAlloc2ndIndex != SIZE_MAX &&
10691 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10693 --nextAlloc2ndIndex;
10697 if(nextAlloc2ndIndex != SIZE_MAX)
10699 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10702 if(lastOffset < suballoc.offset)
10705 ++unusedRangeCount;
10710 ++alloc2ndCount;
10711 usedBytes += suballoc.size;
10714 lastOffset = suballoc.offset + suballoc.size;
10715 --nextAlloc2ndIndex;
10720 if(lastOffset < size)
10723 ++unusedRangeCount;
10732 const VkDeviceSize unusedBytes = size - usedBytes;
10733 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
10738 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10740 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10741 size_t nextAlloc2ndIndex = 0;
10742 while(lastOffset < freeSpace2ndTo1stEnd)
10745 while(nextAlloc2ndIndex < suballoc2ndCount &&
10746 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10748 ++nextAlloc2ndIndex;
10752 if(nextAlloc2ndIndex < suballoc2ndCount)
10754 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10757 if(lastOffset < suballoc.offset)
10760 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10761 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10766 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10769 lastOffset = suballoc.offset + suballoc.size;
10770 ++nextAlloc2ndIndex;
10775 if(lastOffset < freeSpace2ndTo1stEnd)
10778 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10779 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10783 lastOffset = freeSpace2ndTo1stEnd;
10788 nextAlloc1stIndex = m_1stNullItemsBeginCount;
10789 while(lastOffset < freeSpace1stTo2ndEnd)
10792 while(nextAlloc1stIndex < suballoc1stCount &&
10793 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10795 ++nextAlloc1stIndex;
10799 if(nextAlloc1stIndex < suballoc1stCount)
10801 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10804 if(lastOffset < suballoc.offset)
10807 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10808 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10813 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10816 lastOffset = suballoc.offset + suballoc.size;
10817 ++nextAlloc1stIndex;
10822 if(lastOffset < freeSpace1stTo2ndEnd)
10825 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10826 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10830 lastOffset = freeSpace1stTo2ndEnd;
10834 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10836 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10837 while(lastOffset < size)
10840 while(nextAlloc2ndIndex != SIZE_MAX &&
10841 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10843 --nextAlloc2ndIndex;
10847 if(nextAlloc2ndIndex != SIZE_MAX)
10849 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10852 if(lastOffset < suballoc.offset)
10855 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10856 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10861 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10864 lastOffset = suballoc.offset + suballoc.size;
10865 --nextAlloc2ndIndex;
10870 if(lastOffset < size)
10873 const VkDeviceSize unusedRangeSize = size - lastOffset;
10874 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10883 PrintDetailedMap_End(json);
10885 #endif // #if VMA_STATS_STRING_ENABLED
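// Editorial note: CreateAllocationRequest below simply dispatches on upperAddress.
// Requests carrying VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT take the double-stack path
// (_UpperAddress); all others take _LowerAddress.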
10887 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
10888 uint32_t currentFrameIndex,
10889 uint32_t frameInUseCount,
10890 VkDeviceSize bufferImageGranularity,
10891 VkDeviceSize allocSize,
10892 VkDeviceSize allocAlignment,
10893 bool upperAddress,
10894 VmaSuballocationType allocType,
10895 bool canMakeOtherLost,
10896 uint32_t strategy,
10897 VmaAllocationRequest* pAllocationRequest)
10899 VMA_ASSERT(allocSize > 0);
10900 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
10901 VMA_ASSERT(pAllocationRequest != VMA_NULL);
10902 VMA_HEAVY_ASSERT(Validate());
10903 return upperAddress ?
10904 CreateAllocationRequest_UpperAddress(
10905 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10906 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
10907 CreateAllocationRequest_LowerAddress(
10908 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10909 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
10912 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
10913 uint32_t currentFrameIndex,
10914 uint32_t frameInUseCount,
10915 VkDeviceSize bufferImageGranularity,
10916 VkDeviceSize allocSize,
10917 VkDeviceSize allocAlignment,
10918 VmaSuballocationType allocType,
10919 bool canMakeOtherLost,
10920 uint32_t strategy,
10921 VmaAllocationRequest* pAllocationRequest)
10923 const VkDeviceSize size = GetSize();
10924 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10925 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10927 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10929 VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
10930 return false;
10934 if(allocSize > size)
10936 return false;
10938 VkDeviceSize resultBaseOffset = size - allocSize;
10939 if(!suballocations2nd.empty())
10941 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10942 resultBaseOffset = lastSuballoc.offset - allocSize;
10943 if(allocSize > lastSuballoc.offset)
10945 return false;
10950 VkDeviceSize resultOffset = resultBaseOffset;
10953 if(VMA_DEBUG_MARGIN > 0)
10955 if(resultOffset < VMA_DEBUG_MARGIN)
10957 return false;
10959 resultOffset -= VMA_DEBUG_MARGIN;
10963 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
10967 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10969 bool bufferImageGranularityConflict = false;
10970 for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
10972 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
10973 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10975 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
10977 bufferImageGranularityConflict = true;
10978 break;
10985 if(bufferImageGranularityConflict)
10987 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
10992 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
10993 suballocations1st.back().offset + suballocations1st.back().size :
10994 0;
10995 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
10999 if(bufferImageGranularity > 1)
11001 for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11003 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11004 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11006 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
11008 return false;
11012 else // Already on next page.
11014 break;
11020 pAllocationRequest->offset = resultOffset;
11021 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
11022 pAllocationRequest->sumItemSize = 0;
11024 pAllocationRequest->itemsToMakeLostCount = 0;
11025 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
11026 return true;
11030 return false;
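// Editorial note: _LowerAddress (below) handles two situations:
// - 2nd vector empty or used as a double stack: try to append at the end of 1st, bounded
//   by the block end or by the bottom of the double stack.
// - 2nd vector used as a ring buffer (or about to become one): try to append at the end of
//   2nd, i.e. wrap around to the block's beginning, possibly making old allocations at the
//   front of 1st lost to create room when canMakeOtherLost allows it.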
11032 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
11033 uint32_t currentFrameIndex,
11034 uint32_t frameInUseCount,
11035 VkDeviceSize bufferImageGranularity,
11036 VkDeviceSize allocSize,
11037 VkDeviceSize allocAlignment,
11038 VmaSuballocationType allocType,
11039 bool canMakeOtherLost,
11040 uint32_t strategy,
11041 VmaAllocationRequest* pAllocationRequest)
11043 const VkDeviceSize size = GetSize();
11044 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11045 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11047 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11051 VkDeviceSize resultBaseOffset = 0;
11052 if(!suballocations1st.empty())
11054 const VmaSuballocation& lastSuballoc = suballocations1st.back();
11055 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11059 VkDeviceSize resultOffset = resultBaseOffset;
11062 if(VMA_DEBUG_MARGIN > 0)
11064 resultOffset += VMA_DEBUG_MARGIN;
11068 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
11072 if(bufferImageGranularity > 1 && !suballocations1st.empty())
11074 bool bufferImageGranularityConflict = false;
11075 for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
11077 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
11078 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11080 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11082 bufferImageGranularityConflict = true;
11083 break;
11090 if(bufferImageGranularityConflict)
11092 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
11096 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
11097 suballocations2nd.back().offset : size;
11100 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
11104 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11106 for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
11108 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
11109 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11111 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
11113 return false;
11117 else // Already on previous page.
11119 break;
11125 pAllocationRequest->offset = resultOffset;
11126 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
11127 pAllocationRequest->sumItemSize = 0;
11129 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
11130 pAllocationRequest->itemsToMakeLostCount = 0;
11131 return true;
11137 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11139 VMA_ASSERT(!suballocations1st.empty());
11141 VkDeviceSize resultBaseOffset = 0;
11142 if(!suballocations2nd.empty())
11144 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
11145 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
11149 VkDeviceSize resultOffset = resultBaseOffset;
11152 if(VMA_DEBUG_MARGIN > 0)
11154 resultOffset += VMA_DEBUG_MARGIN;
11158 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
11162 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
11164 bool bufferImageGranularityConflict = false;
11165 for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
11167 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
11168 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11170 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11172 bufferImageGranularityConflict = true;
11173 break;
11180 if(bufferImageGranularityConflict)
11182 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
11186 pAllocationRequest->itemsToMakeLostCount = 0;
11187 pAllocationRequest->sumItemSize = 0;
11188 size_t index1st = m_1stNullItemsBeginCount;
11190 if(canMakeOtherLost)
11192 while(index1st < suballocations1st.size() &&
11193 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
11196 const VmaSuballocation& suballoc = suballocations1st[index1st];
11197 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
11199 // No problem.
11201 else
11203 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11204 if(suballoc.hAllocation->CanBecomeLost() &&
11205 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11207 ++pAllocationRequest->itemsToMakeLostCount;
11208 pAllocationRequest->sumItemSize += suballoc.size;
11210 else
11212 return false;
11216 ++index1st;
11220 if(bufferImageGranularity > 1)
11222 while(index1st < suballocations1st.size())
11224 const VmaSuballocation& suballoc = suballocations1st[index1st];
11225 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
11227 if(suballoc.hAllocation != VK_NULL_HANDLE)
11230 if(suballoc.hAllocation->CanBecomeLost() &&
11231 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11233 ++pAllocationRequest->itemsToMakeLostCount;
11234 pAllocationRequest->sumItemSize += suballoc.size;
11236 else
11238 return false;
11243 else // Already on next page.
11245 break;
11247 ++index1st;
11252 if(index1st == suballocations1st.size() &&
11253 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
11256 VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
11258 return false;
11261 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
11262 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
11266 if(bufferImageGranularity > 1)
11268 for(size_t nextSuballocIndex = index1st;
11269 nextSuballocIndex < suballocations1st.size();
11270 nextSuballocIndex++)
11272 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
11273 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11275 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
11277 return false;
11281 else // Already on next page.
11283 break;
11289 pAllocationRequest->offset = resultOffset;
11290 pAllocationRequest->sumFreeSize =
11291 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
11292 - resultBaseOffset
11293 - pAllocationRequest->sumItemSize;
11294 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
11296 return true;
11301 return false;
11303 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
11304 uint32_t currentFrameIndex,
11305 uint32_t frameInUseCount,
11306 VmaAllocationRequest* pAllocationRequest)
11308 if(pAllocationRequest->itemsToMakeLostCount == 0)
11310 return true;
11313 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
11316 SuballocationVectorType* suballocations = &AccessSuballocations1st();
11317 size_t index = m_1stNullItemsBeginCount;
11318 size_t madeLostCount = 0;
11319 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
11321 if(index == suballocations->size())
11323 index = 0;
11325 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11327 suballocations = &AccessSuballocations2nd();
11331 VMA_ASSERT(!suballocations->empty());
11333 VmaSuballocation& suballoc = (*suballocations)[index];
11334 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11336 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11337 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
11338 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11340 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11341 suballoc.hAllocation = VK_NULL_HANDLE;
11342 m_SumFreeSize += suballoc.size;
11343 if(suballocations == &AccessSuballocations1st())
11345 ++m_1stNullItemsMiddleCount;
11347 else
11349 ++m_2ndNullItemsCount;
11351 ++madeLostCount;
11353 else
11355 return false;
11361 CleanupAfterFree();
11363 return true;
11367 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
11369 uint32_t lostAllocationCount = 0;
11371 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11372 for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11374 VmaSuballocation& suballoc = suballocations1st[i];
11375 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11376 suballoc.hAllocation->CanBecomeLost() &&
11377 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11379 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11380 suballoc.hAllocation = VK_NULL_HANDLE;
11381 ++m_1stNullItemsMiddleCount;
11382 m_SumFreeSize += suballoc.size;
11383 ++lostAllocationCount;
11387 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11388 for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11390 VmaSuballocation& suballoc = suballocations2nd[i];
11391 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11392 suballoc.hAllocation->CanBecomeLost() &&
11393 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11395 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11396 suballoc.hAllocation = VK_NULL_HANDLE;
11397 ++m_2ndNullItemsCount;
11398 m_SumFreeSize += suballoc.size;
11399 ++lostAllocationCount;
11403 if(lostAllocationCount)
11405 CleanupAfterFree();
11408 return lostAllocationCount;
11411 VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
11413 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11414 for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11416 const VmaSuballocation& suballoc = suballocations1st[i];
11417 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11419 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11421 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11422 return VK_ERROR_VALIDATION_FAILED_EXT;
11424 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11426 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11427 return VK_ERROR_VALIDATION_FAILED_EXT;
11432 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11433 for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11435 const VmaSuballocation& suballoc = suballocations2nd[i];
11436 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11438 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11440 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11441 return VK_ERROR_VALIDATION_FAILED_EXT;
11443 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11445 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11446 return VK_ERROR_VALIDATION_FAILED_EXT;
11451 return VK_SUCCESS;
11454 void VmaBlockMetadata_Linear::Alloc(
11455 const VmaAllocationRequest& request,
11456 VmaSuballocationType type,
11457 VkDeviceSize allocSize,
11458 VmaAllocation hAllocation)
11460 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
11462 switch(request.type)
11464 case VmaAllocationRequestType::UpperAddress:
11466 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
11467 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
11468 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11469 suballocations2nd.push_back(newSuballoc);
11470 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
11471 break;
11473 case VmaAllocationRequestType::EndOf1st:
11475 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11477 VMA_ASSERT(suballocations1st.empty() ||
11478 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
11480 VMA_ASSERT(request.offset + allocSize <= GetSize());
11482 suballocations1st.push_back(newSuballoc);
11483 break;
11485 case VmaAllocationRequestType::EndOf2nd:
11487 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11489 VMA_ASSERT(!suballocations1st.empty() &&
11490 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
11491 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11493 switch(m_2ndVectorMode)
11495 case SECOND_VECTOR_EMPTY:
11497 VMA_ASSERT(suballocations2nd.empty());
11498 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
11500 case SECOND_VECTOR_RING_BUFFER:
11502 VMA_ASSERT(!suballocations2nd.empty());
11504 case SECOND_VECTOR_DOUBLE_STACK:
11505 VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
11511 suballocations2nd.push_back(newSuballoc);
11512 break;
11515 VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
11518 m_SumFreeSize -= newSuballoc.size;
11521 void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
11523 FreeAtOffset(allocation->GetOffset());
11526 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
11528 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11529 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11531 if(!suballocations1st.empty())
11534 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
11535 if(firstSuballoc.offset == offset)
11537 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11538 firstSuballoc.hAllocation = VK_NULL_HANDLE;
11539 m_SumFreeSize += firstSuballoc.size;
11540 ++m_1stNullItemsBeginCount;
11541 CleanupAfterFree();
11542 return;
11547 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
11548 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11550 VmaSuballocation& lastSuballoc = suballocations2nd.back();
11551 if(lastSuballoc.offset == offset)
11553 m_SumFreeSize += lastSuballoc.size;
11554 suballocations2nd.pop_back();
11555 CleanupAfterFree();
11556 return;
11560 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
11562 VmaSuballocation& lastSuballoc = suballocations1st.back();
11563 if(lastSuballoc.offset == offset)
11565 m_SumFreeSize += lastSuballoc.size;
11566 suballocations1st.pop_back();
11567 CleanupAfterFree();
11568 return;
11574 VmaSuballocation refSuballoc;
11575 refSuballoc.offset = offset;
11577 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
11578 suballocations1st.begin() + m_1stNullItemsBeginCount,
11579 suballocations1st.end(),
11580 refSuballoc,
11581 VmaSuballocationOffsetLess());
11582 if(it != suballocations1st.end())
11584 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11585 it->hAllocation = VK_NULL_HANDLE;
11586 ++m_1stNullItemsMiddleCount;
11587 m_SumFreeSize += it->size;
11588 CleanupAfterFree();
11589 return;
11593 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
11596 VmaSuballocation refSuballoc;
11597 refSuballoc.offset = offset;
11599 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
11600 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
11601 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
11602 if(it != suballocations2nd.end())
11604 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11605 it->hAllocation = VK_NULL_HANDLE;
11606 ++m_2ndNullItemsCount;
11607 m_SumFreeSize += it->size;
11608 CleanupAfterFree();
11609 return;
11613 VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
11616 bool VmaBlockMetadata_Linear::ShouldCompact1st() const
11618 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11619 const size_t suballocCount = AccessSuballocations1st().size();
11620 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
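// Editorial note on the heuristic above: compaction of the 1st vector triggers once it has
// more than 32 entries and nullItemCount * 2 >= (suballocCount - nullItemCount) * 3, i.e.
// when freed null items outnumber live items at least 3:2 (60% of entries dead). Example:
// with 100 entries of which 60 are null, 60 * 2 = 120 >= (100 - 60) * 3 = 120, so
// CleanupAfterFree below compacts the vector down to the 40 live items.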
11623 void VmaBlockMetadata_Linear::CleanupAfterFree()
11625 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11626 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11628 if(IsEmpty())
11630 suballocations1st.clear();
11631 suballocations2nd.clear();
11632 m_1stNullItemsBeginCount = 0;
11633 m_1stNullItemsMiddleCount = 0;
11634 m_2ndNullItemsCount = 0;
11635 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11639 const size_t suballoc1stCount = suballocations1st.size();
11640 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11641 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
11644 while(m_1stNullItemsBeginCount < suballoc1stCount &&
11645 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
11647 ++m_1stNullItemsBeginCount;
11648 --m_1stNullItemsMiddleCount;
11652 while(m_1stNullItemsMiddleCount > 0 &&
11653 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
11655 --m_1stNullItemsMiddleCount;
11656 suballocations1st.pop_back();
11660 while(m_2ndNullItemsCount > 0 &&
11661 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
11663 --m_2ndNullItemsCount;
11664 suballocations2nd.pop_back();
11668 while(m_2ndNullItemsCount > 0 &&
11669 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
11671 --m_2ndNullItemsCount;
11672 VmaVectorRemove(suballocations2nd, 0);
11675 if(ShouldCompact1st())
11677 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
11678 size_t srcIndex = m_1stNullItemsBeginCount;
11679 for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
11681 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
11683 ++srcIndex;
11685 if(dstIndex != srcIndex)
11687 suballocations1st[dstIndex] = suballocations1st[srcIndex];
11689 ++srcIndex;
11691 suballocations1st.resize(nonNullItemCount);
11692 m_1stNullItemsBeginCount = 0;
11693 m_1stNullItemsMiddleCount = 0;
11697 if(suballocations2nd.empty())
11699 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11703 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
11705 suballocations1st.clear();
11706 m_1stNullItemsBeginCount = 0;
11708 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11711 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11712 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
11713 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
11714 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
11716 ++m_1stNullItemsBeginCount;
11717 --m_1stNullItemsMiddleCount;
11719 m_2ndNullItemsCount = 0;
11720 m_1stVectorIndex ^= 1;
11725 VMA_HEAVY_ASSERT(Validate());
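////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Buddy
//
// Editorial overview (inferred from the implementation that follows): a classic buddy
// allocator. The usable size is the largest power of two not exceeding the block size
// (VmaPrevPow2); level 0 is a single node covering all of it and each deeper level halves
// the node size, so LevelToNodeSize(level) is effectively m_UsableSize >> level, down to
// MIN_NODE_SIZE or MAX_LEVELS. Free nodes sit on the doubly linked per-level free lists in
// m_FreeList; a split node keeps its left child, and the two children point at each other
// through `buddy`, which lets Free() merge buddy pairs back into their parent.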
11732 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
11733 VmaBlockMetadata(hAllocator),
11734 m_Root(VMA_NULL),
11735 m_AllocationCount(0),
11736 m_FreeCount(1),
11737 m_SumFreeSize(0)
11739 memset(m_FreeList, 0, sizeof(m_FreeList));
11742 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
11744 DeleteNode(m_Root);
11747 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
11749 VmaBlockMetadata::Init(size);
11751 m_UsableSize = VmaPrevPow2(size);
11752 m_SumFreeSize = m_UsableSize;
11754 m_LevelCount = 1;
11756 while(m_LevelCount < MAX_LEVELS &&
11757 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
11759 ++m_LevelCount;
11762 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
11763 rootNode->offset = 0;
11764 rootNode->type = Node::TYPE_FREE;
11765 rootNode->parent = VMA_NULL;
11766 rootNode->buddy = VMA_NULL;
11768 m_Root = rootNode;
11769 AddToFreeListFront(0, rootNode);
11772 bool VmaBlockMetadata_Buddy::Validate() const
11775 ValidationContext ctx;
11776 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
11778 VMA_VALIDATE(false && "ValidateNode failed.");
11780 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
11781 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
11784 for(uint32_t level = 0; level < m_LevelCount; ++level)
11786 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
11787 m_FreeList[level].front->free.prev == VMA_NULL);
11789 for(Node* node = m_FreeList[level].front;
11790 node != VMA_NULL;
11791 node = node->free.next)
11793 VMA_VALIDATE(node->type == Node::TYPE_FREE);
11795 if(node->free.next == VMA_NULL)
11797 VMA_VALIDATE(m_FreeList[level].back == node);
11801 VMA_VALIDATE(node->free.next->free.prev == node);
11807 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
11809 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
11812 return true;
11815 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
11817 for(uint32_t level = 0; level < m_LevelCount; ++level)
11819 if(m_FreeList[level].front != VMA_NULL)
11821 return LevelToNodeSize(level);
11824 return 0;
11827 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
11829 const VkDeviceSize unusableSize = GetUnusableSize();
11840 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
11842 if(unusableSize > 0)
11851 void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
11853 const VkDeviceSize unusableSize = GetUnusableSize();
11855 inoutStats.size += GetSize();
11856 inoutStats.unusedSize += m_SumFreeSize + unusableSize;
11861 if(unusableSize > 0)
11868 #if VMA_STATS_STRING_ENABLED
11870 void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
11873 VmaStatInfo stat;
11874 CalcAllocationStatInfo(stat);
11876 PrintDetailedMap_Begin(json, stat.unusedBytes, stat.allocationCount, stat.unusedRangeCount);
11882 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
11884 const VkDeviceSize unusableSize = GetUnusableSize();
11885 if(unusableSize > 0)
11887 PrintDetailedMap_UnusedRange(json, m_UsableSize, unusableSize);
11892 PrintDetailedMap_End(json);
11895 #endif // #if VMA_STATS_STRING_ENABLED
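// Editorial note: the search below starts at the deepest level whose node size still fits
// allocSize (AllocSizeToLevel) and moves toward level 0; "for(level = targetLevel + 1;
// level--; )" visits targetLevel, targetLevel - 1, ..., 0. The first free node whose
// offset satisfies allocAlignment wins, and its level is passed to Alloc() through
// pAllocationRequest->customData.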
11897 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
11898 uint32_t currentFrameIndex,
11899 uint32_t frameInUseCount,
11900 VkDeviceSize bufferImageGranularity,
11901 VkDeviceSize allocSize,
11902 VkDeviceSize allocAlignment,
11903 bool upperAddress,
11904 VmaSuballocationType allocType,
11905 bool canMakeOtherLost,
11906 uint32_t strategy,
11907 VmaAllocationRequest* pAllocationRequest)
11909 VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
11913 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
11914 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
11915 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
11917 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
11918 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
11921 if(allocSize > m_UsableSize)
11926 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
11927 for(uint32_t level = targetLevel + 1; level--; )
11929 for(Node* freeNode = m_FreeList[level].front;
11930 freeNode != VMA_NULL;
11931 freeNode = freeNode->free.next)
11933 if(freeNode->offset % allocAlignment == 0)
11935 pAllocationRequest->type = VmaAllocationRequestType::Normal;
11936 pAllocationRequest->offset = freeNode->offset;
11937 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
11938 pAllocationRequest->sumItemSize = 0;
11939 pAllocationRequest->itemsToMakeLostCount = 0;
11940 pAllocationRequest->customData = (
void*)(uintptr_t)level;
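/*
Note (editorial, not part of the original source): the loop above iterates
level = targetLevel, targetLevel-1, ..., 0, i.e. from the smallest node size
that still fits the request toward progressively larger nodes. Taking a node
from a larger level is still correct because Alloc() splits it down to
targetLevel before use. Example: for a 16 MiB request whose target level has
no free node, a free 32 MiB node one level up is returned and later split
into two 16 MiB buddies.
*/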
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    /*
    Lost allocations are not supported in the buddy allocator algorithm.
    Support might be added in the future.
    */
    return pAllocationRequest->itemsToMakeLostCount == 0;
}
uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    /*
    Lost allocations are not supported in the buddy allocator algorithm.
    */
    return 0;
}
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down, splitting free nodes.
    while(currLevel < targetLevel)
    {
        // currNode is already the first free node at currLevel.
        // Remove it from the list of free nodes at this level.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to the free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
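/*
Worked example (editorial, assuming a 256 MiB usable block): allocating 16 MiB
from an empty block starts at the level-0 free node found by
CreateAllocationRequest() and splits 256 -> 128+128 -> 64+64 -> 32+32 ->
16+16, allocating the left-most 16 MiB leaf and leaving one free buddy behind
at every level passed on the way down. Each split adds one node to
m_FreeCount; the final allocation removes one.
*/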
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }

    vma_delete(GetAllocationCallbacks(), node);
}
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev, curr->free.next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    // Find the level with the smallest node size that still fits the allocation.
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
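/*
Worked example (editorial): with m_UsableSize = 256 MiB, a 10 MiB request
walks 256 -> 128 -> 64 -> 32 -> 16 and stops when the next level (8 MiB)
would be too small, returning the level whose node size is 16 MiB - the
smallest power-of-2 node that still fits the allocation.
*/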
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find node and level.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT)
    {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize)
        {
            node = node->split.leftChild;
        }
        else
        {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free nodes (buddies) while possible.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
    {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}
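/*
Note (editorial): freeing walks from the root to the allocated leaf by
comparing the offset against the midpoint of each split node, then merges
upward: as long as the freed node's buddy is also free, both children are
deleted and the parent reverts to TYPE_FREE, one level at a time. This is the
classic buddy-coalescing step that keeps free space in the largest possible
power-of-2 chunks.
*/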
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        // The whole node counts as one unused range.
        // ...
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            // ... (the allocation is counted into outInfo)

            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                // ... (padding after the allocation counts as an unused range)
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    // List is empty.
    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else
    {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}
void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
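/*
Note (editorial): m_FreeList keeps one intrusive doubly-linked list of free
nodes per level, with both front and back pointers. Push and unlink are O(1),
so split (AddToFreeListFront) and merge (RemoveFromFreeList) cost constant
time apart from the tree walk itself.
*/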
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}
void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}
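/*
Note (editorial sketch): the algorithm value comes from the pool's creation
flags, so a custom pool picks its metadata implementation here. User-side
sketch, using identifiers from the public API:

    // VmaPoolCreateInfo poolInfo = {};
    // poolInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;  // -> VmaBlockMetadata_Buddy
    // poolInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT; // -> VmaBlockMetadata_Linear
    // // flags == 0 -> VmaBlockMetadata_Generic (default free-list allocator)
*/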
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}
bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}
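/*
Note (editorial): mapping is reference-counted per block. The first Map() call
does the actual vkMapMemory of the whole block; nested calls only bump
m_MapCount and return the cached pointer. This is what lets persistently
mapped allocations and temporary vmaMapMemory() calls coexist on one
VkDeviceMemory. User-side sketch (public API, error handling elided):

    // void* pData = VMA_NULL;
    // vmaMapMemory(allocator, alloc, &pData);
    // memcpy(pData, srcData, srcSize);
    // vmaUnmapMemory(allocator, alloc);
*/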
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}
VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
static void VmaInitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

// ... (VmaAddStatInfo, which accumulates one VmaStatInfo into another, is defined here)

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}
VmaPool_T::~VmaPool_T()
{
}
void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}
#if VMA_STATS_STRING_ENABLED

// ...

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}
VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}
bool VmaBlockVector::IsEmpty()
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    return m_Blocks.empty();
}
bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        // ...
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
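/*
Note (editorial): Allocate() is all-or-nothing for a batch - if any page
fails, every page already created in this call is freed and the output array
is zeroed, so the caller never sees a partially filled array. User-side
sketch (public API):

    // VmaAllocation allocs[8];
    // VkResult res = vmaAllocateMemoryPages(allocator, &memReq, &createInfo, 8, allocs, VMA_NULL);
    // // On failure, allocs[0..7] are all VK_NULL_HANDLE.
*/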
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    // ... (createInfo.flags are decoded here into isUpperAddress, canMakeOtherLost,
    // mapped, isUserDataString and strategy)

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);

    // The linear algorithm supports canMakeOtherLost only when used as a ring buffer,
    // which in turn requires maxBlockCount == 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with the linear allocator and within a single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // ... (unsupported strategy flags are rejected here with
    //     return VK_ERROR_FEATURE_NOT_PRESENT;)

    // Early reject: requested allocation size is larger than the maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to succeed without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only the last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
        }

        // 2. Try to create a new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for the new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from the new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }
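/*
Note (editorial): for pools without an explicit block size, the first blocks
are deliberately undersized - preferred size divided by 8, 4, 2 - and the
shift loop above retries with progressively smaller sizes when
vkAllocateMemory fails. Example: with a 256 MiB preferred block size, the
very first block may be created at 32 MiB, and an out-of-memory result on a
256 MiB attempt falls back to 128 MiB, then 64 MiB, then 32 MiB before
giving up.
*/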
    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        // Check best/worst.
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        // Check best/worst.
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // Allocate from this block.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    UpdateHasEmptyBlock();
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        // ... (remaining parameters: alignment, size, memory type, suballocation type, mapped, canBecomeLost)
                        );
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations could not be made lost. Retry.
            }
            else
            {
                // Could not find a place in any of the blocks - break the outer loop.
                break;
            }
        }
        // Maximum number of tries exceeded - a very unlikely situation.
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
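/*
Note (editorial): the canMakeOtherLost path is a retry loop - each pass picks
the cheapest candidate (cost = bytes of other allocations that would have to
be sacrificed), tries to actually make those allocations lost, and retries up
to VMA_ALLOCATION_TRY_COUNT (32) times, since concurrent use can invalidate a
request between cost evaluation and commit.
*/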
void VmaBlockVector::Free(const VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already had an empty block, or we are over budget - delete this one.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // else: keep this block as the single empty one to avoid vkAllocateMemory churn.
        }
        // pBlock didn't become empty, but we already have another empty block - free that one.
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
            }
        }

        UpdateHasEmptyBlock();
        IncrementallySortBlocks();
    }

    // Destruction of the free block is deferred until this point, outside of
    // the mutex lock, for performance reasons.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}
void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}
void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
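/*
Note (editorial): sorting is amortized - at most one swap per call. Blocks are
kept roughly ordered by ascending free space so that allocation searches that
walk m_Blocks forward hit the fullest blocks first, which packs memory
tighter over time without paying for a full sort on every free.
*/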
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex,
        m_FrameInUseCount,
        m_BufferImageGranularity,
        size,
        alignment,
        isUpperAddress,
        suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // We no longer have an empty allocation request - not making anything lost.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        UpdateHasEmptyBlock();
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            // ... (remaining parameters: alignment, size, memory type, suballocation type, mapped, canBecomeLost)
            );
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;

#if VMA_BUFFER_DEVICE_ADDRESS
    // Every standalone block can potentially contain a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT - always enable the feature.
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_hAllocator->m_UseKhrBufferDeviceAddress)
    {
        allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
        VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created. Create a new block object for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get the mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do the actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // The place where the actual data copy happens.
            memmove(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation,
    // regardless of pDefragCtx->res.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
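/*
Note (editorial): for non-coherent memory types, each copy is bracketed by
vkInvalidateMappedMemoryRanges on the source and vkFlushMappedMemoryRanges on
the destination, with both ranges expanded to multiples of
nonCoherentAtomSize. Example of the alignment math with nonCoherentAtomSize =
256: a 100-byte move at srcOffset = 300 invalidates offset
VmaAlignDown(300, 256) = 256 with size VmaAlignUp(100 + 44, 256) = 256
(clamped to the end of the block).
*/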
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        // ...
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind a buffer over the whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to the command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // The work is not done yet - the caller must submit the command buffer.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}
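/*
Note (editorial): GPU defragmentation records vkCmdCopyBuffer commands into
the caller's command buffer instead of doing memcpy on the host - a temporary
VkBuffer is created and bound over each participating block, regions are
copied buffer-to-buffer, and the context is left in VK_NOT_READY until the
caller has submitted the command buffer and finished the defragmentation
(vmaDefragmentationEnd in the public API).
*/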
void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                break;
            }
        }
    }
    UpdateHasEmptyBlock();
}
void VmaBlockVector::UpdateHasEmptyBlock()
{
    m_HasEmptyBlock = false;
    for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = true;
            break;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
            {
                if(!m_Mutex.TryLockWrite())
                {
                    pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
                    return;
                }
            }
            else
            {
                m_Mutex.LockWrite();
                pCtx->mutexLocked = true;
            }
        }

        pCtx->Begin(overlappingMoveSupported, flags);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
        {
            if(m_hAllocator->m_UseMutex)
                m_Mutex.UnlockWrite();

            if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
                pCtx->res = VK_NOT_READY;

            return;
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
            }
        }
    }
}
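/*
Note (editorial): the CPU/GPU choice is a heuristic - if both paths are
possible, DEVICE_LOCAL memory (or an integrated GPU, where all memory is
effectively device-local) prefers the GPU path with vkCmdCopyBuffer, while
plain host-visible staging memory is moved with memmove on the CPU.
overlappingMoveSupported is only true on the CPU path, because memmove
handles overlapping ranges, whereas buffer-to-buffer copies over overlapping
regions are not guaranteed by Vulkan.
*/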
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    uint32_t flags,
    VmaDefragmentationStats* pStats)
{
    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL && m_hAllocator->m_UseMutex)
    {
        VMA_ASSERT(pCtx->mutexLocked == false);

        // Incremental defragmentation doesn't hold the lock, so we have to
        // take it now, since we mutate state below.
        m_Mutex.LockWrite();
        pCtx->mutexLocked = true;
    }

    // If the mutex isn't locked we didn't do any work and there is nothing to delete.
    if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
    {
        // Destroy buffers.
        for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
        {
            VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
            if(blockCtx.hBuffer)
            {
                (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            FreeEmptyBlocks(pStats);
        }
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
uint32_t VmaBlockVector::ProcessDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    const uint32_t moveCount = VMA_MIN(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);

    for(uint32_t i = 0; i < moveCount; ++ i)
    {
        VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];

        pMove->allocation = move.hAllocation;
        pMove->memory = move.pDstBlock->GetDeviceMemory();
        pMove->offset = move.dstOffset;

        ++pMove;
    }

    pCtx->defragmentationMovesProcessed += moveCount;

    return moveCount;
}
void VmaBlockVector::CommitDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationStats* pStats)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
    {
        const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];

        move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
        move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
    }

    pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
    FreeEmptyBlocks(pStats);
}
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}
bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create a block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}
VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now, as we are inside VmaBlockVector::m_Mutex, we can make the final check whether this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    bool freeOldAllocations)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    // ... (the minimum source block index is determined here)
    size_t srcBlockMinIndex = 0;

    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find the next allocation to move.
        // 1.1. Walk m_Blocks from last to first - they are sorted from most "destination" to most "source".
        // 1.2. Within a block, walk m_Allocations from last to first.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find a new place for this allocation in a preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                // ... (upperAddress, suballocType, canMakeOtherLost and strategy arguments)
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached the limit on the number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move = {};
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                move.hAllocation = allocInfo.m_hAllocation;
                move.pSrcBlock = pSrcBlockInfo->m_pBlock;
                move.pDstBlock = pDstBlockInfo->m_pBlock;

                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);

                if(freeOldAllocations)
                {
                    pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
                    allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
                }

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pBlockInfo->m_Allocations for the next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        // This is a choice based on research.
        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks by the main criterion, from most "destination" to most "source".
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // This is a choice based on research.
    const uint32_t roundCount = 2;

    // Execute the defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        // ... (DefragmentRound is invoked here with the limits and the flag
        // deciding whether old allocations are freed immediately)
    }

    return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
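/*
Note (editorial): a move "makes sense" only if it strictly lowers the
allocation's position - to an earlier block, or to a smaller offset within
the same block. Because every accepted move compacts the pool, the round
loop cannot cycle or undo its own progress.
*/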
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}
VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most "destination".

    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
    });

    // THE MAIN ALGORITHM

    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;

    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            VmaDefragmentationMove move = {};
            // Try to place it in one of the free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block.
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing its offset.

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to the next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register the remaining free space at the end of the dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block.
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If the destination and source places overlap, skip if it would move
                        // the allocation by less than 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing its offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;

                        move.srcBlockIndex = srcOrigBlockIndex;
                        move.dstBlockIndex = dstOrigBlockIndex;
                        move.srcOffset = srcAllocOffset;
                        move.dstOffset = dstAllocOffset;
                        move.size = srcAllocSize;

                        moves.push_back(move);
                    }
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = dstOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
        }
    }
14480 m_BlockInfos.clear();
14482 PostprocessMetadata();
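
// Strips all FREE suballocations from each block's metadata so the Defragment()
// loop above can treat m_Suballocations as a packed list of used allocations
// only. PostprocessMetadata() rebuilds the free entries afterwards.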
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
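
// Rebuilds the FREE suballocations, m_FreeCount, m_SumFreeSize and the by-size
// registry after Defragment() has rearranged the used allocations, restoring
// the invariants that VmaBlockMetadata_Generic expects.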
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
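
// Inserts suballoc into pMetadata->m_Suballocations, keeping the list sorted
// by offset, using a linear scan from the front.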
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
    const VmaSuballocation& suballoc)
{
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end())
    {
        if(it->offset < suballoc.offset)
        {
            ++it;
        }
        else
        {
            break;
        }
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
    defragmentationMovesProcessed(0),
    defragmentationMovesCommitted(0),
    hasDefragmentationPlan(0),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}
VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}
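
// Chooses between the two defragmentation algorithms. The fast one requires
// that every allocation in the block vector is moveable, so it can only be
// used when defragmenting all allocations; see the conditions below.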
void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    /*
    Fast algorithm is supported only when certain criteria are met:
    - VMA_DEBUG_MARGIN is 0.
    - All allocations in this block vector are moveable.
    - There is no possibility of image/buffer granularity conflict.
    - The defragmentation is not incremental.
    */
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
        !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL))
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}
VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, const VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with a non-default algorithm are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    const VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // Dedicated allocations cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // Lost allocations cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with a non-default algorithm are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to a default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
    {
        // For incremental defragmentation, just earmark how much can be moved.
        // The real work happens in the defragmentation passes.
        m_MaxCpuBytesToMove = maxCpuBytesToMove;
        m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;

        m_MaxGpuBytesToMove = maxGpuBytesToMove;
        m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;

        if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
            m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
        {
            return VK_NOT_READY;
        }
    }
    else
    {
        if(commandBuffer == VK_NULL_HANDLE)
        {
            maxGpuBytesToMove = 0;
            maxGpuAllocationsToMove = 0;
        }
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats, flags,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats, flags,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}
VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo)
{
    VmaDefragmentationPassMoveInfo* pCurrentMove = pInfo->pMoves;
    uint32_t movesLeft = pInfo->moveCount;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                pBlockVectorCtx->GetBlockVector()->Defragment(
                    pBlockVectorCtx,
                    m_pStats, m_Flags,
                    m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                    m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                    VK_NULL_HANDLE);

                if(pBlockVectorCtx->res < VK_SUCCESS)
                    continue;

                pBlockVectorCtx->hasDefragmentationPlan = true;
            }

            const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
                pBlockVectorCtx,
                pCurrentMove, movesLeft);

            movesLeft -= processed;
            pCurrentMove += processed;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                m_pStats, m_Flags,
                m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                VK_NULL_HANDLE);

            if(pBlockVectorCtx->res < VK_SUCCESS)
                continue;

            pBlockVectorCtx->hasDefragmentationPlan = true;
        }

        const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
            pBlockVectorCtx,
            pCurrentMove, movesLeft);

        movesLeft -= processed;
        pCurrentMove += processed;
    }

    pInfo->moveCount = pInfo->moveCount - movesLeft;

    return VK_SUCCESS;
}
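
// Commits the moves prepared by DefragmentPassBegin(). Returns VK_NOT_READY
// as long as some pools still have uncommitted moves (i.e. another pass is
// needed), VK_SUCCESS once everything has been committed.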
VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
{
    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                res = VK_NOT_READY;
                continue;
            }

            pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
                pBlockVectorCtx, m_pStats);

            if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
                res = VK_NOT_READY;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            res = VK_NOT_READY;
            continue;
        }

        pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
            pBlockVectorCtx, m_pStats);

        if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
            res = VK_NOT_READY;
    }

    return res;
}
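
// Illustrative only - a minimal sketch of driving incremental defragmentation
// through the public API declared earlier in this header. The buffer
// recreation/copy step between begin and end of each pass is application
// specific and only hinted at; variable names here are hypothetical:
//
//     VmaDefragmentationInfo2 defragInfo = {};
//     defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_INCREMENTAL;
//     defragInfo.allocationCount = allocCount;   // application-provided
//     defragInfo.pAllocations = allocs;          // application-provided
//     defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
//     defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
//
//     VmaDefragmentationContext defragCtx;
//     vmaDefragmentationBegin(allocator, &defragInfo, nullptr, &defragCtx);
//
//     for(;;)
//     {
//         VmaDefragmentationPassMoveInfo moves[64];
//         VmaDefragmentationPassInfo passInfo = {};
//         passInfo.moveCount = 64;
//         passInfo.pMoves = moves;
//         vmaBeginDefragmentationPass(allocator, defragCtx, &passInfo);
//         // ... copy the passInfo.moveCount reported regions here ...
//         if(vmaEndDefragmentationPass(allocator, defragCtx) == VK_SUCCESS)
//             break;
//     }
//
//     vmaDefragmentationEnd(allocator, defragCtx);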
#if VMA_RECORDING_ENABLED
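
// VmaRecorder writes a CSV log of VMA calls ("Calls recording" format,
// version 1,8 - see Init() below) for offline replay. It is enabled through
// VmaAllocatorCreateInfo::pRecordSettings. Illustrative sketch, with an
// application-chosen (hypothetical) output path:
//
//     VmaRecordSettings recordSettings = {};
//     recordSettings.flags = VMA_RECORD_FLUSH_AFTER_CALL_BIT;
//     recordSettings.pFilePath = "vma_replay.csv"; // hypothetical path
//
//     VmaAllocatorCreateInfo allocatorInfo = {};
//     // ... fill physicalDevice, device, instance ...
//     allocatorInfo.pRecordSettings = &recordSettings;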
VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_RecordingStartTime(std::chrono::high_resolution_clock::now())
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

#if defined(_WIN32)
    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
#else
    // Open file for writing.
    m_File = fopen(settings.pFilePath, "wb");
    if(m_File == 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
#endif

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,8");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}

void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}

void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    PrintPointerList(info.poolCount, info.pPools);
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
    Flush();
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}
void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
    Flush();
}

VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            // If the string is not stored by VMA, log the pointer value instead.
            snprintf(m_PtrStr, 17, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled,
    bool deviceCoherentMemoryExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
#if defined(_WIN32)
    outParams.threadId = GetCurrentThreadId();
#else
    // Use C++11 features to get the thread id and convert it to uint32_t.
    // There is room for optimization since stringstream is quite slow.
    std::thread::id thread_id = std::this_thread::get_id();
    std::stringstream thread_id_to_string_converter;
    thread_id_to_string_converter << thread_id;
    std::string thread_id_as_string = thread_id_to_string_converter.str();
    outParams.threadId = static_cast<uint32_t>(std::stoi(thread_id_as_string.c_str()));
#endif

    auto current_time = std::chrono::high_resolution_clock::now();

    outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
}
void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}

void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
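
// Thread-safe pool of VmaAllocation_T objects, allocated in chunks of 1024,
// so creating/destroying allocation handles avoids a heap round-trip per call.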
VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_GlobalMemoryTypeBits(UINT32_MAX)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Needs to be a multiple of sizeof(uint32_t) because the corruption detection magic value is written there.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if !(VMA_BUFFER_DEVICE_ADDRESS)
    if(m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
    }
#endif
#if VMA_VULKAN_VERSION < 1002000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this, // hAllocator
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0, // minBlockCount
            SIZE_MAX, // maxBlockCount
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call CreateMinBlocks here, because minBlockCount is 0.

        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}

VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget,
            m_UseAmdDeviceCoherentMemory);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
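
// Vulkan function pointers are gathered in up to three stages: statically
// linked symbols first (VMA_STATIC_VULKAN_FUNCTIONS), then any pointers the
// user passed in VmaAllocatorCreateInfo::pVulkanFunctions, then remaining
// holes are filled via vkGetInstanceProcAddr/vkGetDeviceProcAddr
// (VMA_DYNAMIC_VULKAN_FUNCTIONS). Illustrative sketch of supplying custom
// pointers, e.g. when using a function loader (names assumed to be in scope
// on the application side):
//
//     VmaVulkanFunctions vulkanFunctions = {};
//     vulkanFunctions.vkAllocateMemory = vkAllocateMemory;
//     vulkanFunctions.vkFreeMemory = vkFreeMemory;
//     // ... fill the remaining members the same way ...
//
//     VmaAllocatorCreateInfo allocatorInfo = {};
//     allocatorInfo.pVulkanFunctions = &vulkanFunctions;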
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Static();
#endif

    if(pVulkanFunctions != VMA_NULL)
    {
        ImportVulkanFunctions_Custom(pVulkanFunctions);
    }

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Dynamic();
#endif

    ValidateVulkanFunctions();
}
#if VMA_STATIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Static()
{
    // Vulkan 1.0
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;

    // Vulkan 1.1
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
        m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
        m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
    }
#endif
}

#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions)
{
    VMA_ASSERT(pVulkanFunctions != VMA_NULL);

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
    VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
    VMA_COPY_IF_NOT_NULL(vkFreeMemory);
    VMA_COPY_IF_NOT_NULL(vkMapMemory);
    VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
    VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
    VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
    VMA_COPY_IF_NOT_NULL(vkCreateImage);
    VMA_COPY_IF_NOT_NULL(vkDestroyImage);
    VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif

#if VMA_MEMORY_BUDGET
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif

#undef VMA_COPY_IF_NOT_NULL
}
#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
{
#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);

    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
    VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
    VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
    VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
    VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
    VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
    VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
    VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
    VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
    VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
    VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
    VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");

#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2");
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif

#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
    }
#endif

#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
    }
#endif // #if VMA_BIND_MEMORY2

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif // #if VMA_MEMORY_BUDGET

#undef VMA_FETCH_DEVICE_FUNC
#undef VMA_FETCH_INSTANCE_FUNC
}

#endif // #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
void VmaAllocator_T::ValidateVulkanFunctions()
{
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif

#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
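
// Worked example, assuming the library defaults (VMA_SMALL_HEAP_MAX_SIZE =
// 1 GiB, VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE = 256 MiB): a 256 MiB heap counts
// as small, so its blocks are 256 MiB / 8 = 32 MiB; an 8 GiB heap gets the
// preferred large-heap block size of 256 MiB (unless overridden via
// VmaAllocatorCreateInfo::preferredLargeHeapBlockSize).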
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size, VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer, VkBufferUsageFlags dedicatedBufferUsage, VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex, VmaSuballocationType suballocType,
    size_t allocationCount, VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristic: allocate dedicated memory if requested size is greater than half of preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        return AllocateDedicatedMemory(
            size, suballocType, memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer, dedicatedBufferUsage, dedicatedImage,
            allocationCount, pAllocations);
    }
    else
    {
        VkResult res = blockVector->Allocate(
            m_CurrentFrameIndex.load(),
            size, alignment, finalCreateInfo, suballocType,
            allocationCount, pAllocations);
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Block allocation failed; try dedicated memory, unless forbidden.
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }

        res = AllocateDedicatedMemory(
            size, suballocType, memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer, dedicatedBufferUsage, dedicatedImage,
            allocationCount, pAllocations);
        if(res == VK_SUCCESS)
        {
            // Succeeded: AllocateDedicatedMemory already filled pAllocations.
            VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
            return VK_SUCCESS;
        }
        // Everything failed: return error code.
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

#if VMA_BUFFER_DEVICE_ADDRESS
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_UseKhrBufferDeviceAddress)
    {
        bool canContainBufferWithDeviceAddress = true;
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX || // Usage flags unknown
                (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = false;
        }
        if(canContainBufferWithDeviceAddress)
        {
            allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
            VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
        }
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size, suballocType, memTypeIndex, allocInfo,
            map, isUserDataString, pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register them in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            // No need to unmap: the Vulkan spec allows freeing memory that is still mapped.
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
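
// Both queries prefer the extended vkGet*MemoryRequirements2 path when
// available, because only VkMemoryDedicatedRequirementsKHR can report that a
// resource requires or prefers its own VkDeviceMemory (e.g. some drivers do
// for render targets); the plain Vulkan 1.0 query cannot express that.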
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If the pool's memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfoForPool,
            suballocType,
            allocationCount,
            pAllocations);
    }
    else
    {
        // Bit mask of memory types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer,
                dedicatedBufferUsage,
                dedicatedImage,
                createInfo,
                memTypeIndex,
                suballocType,
                allocationCount,
                pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed. Try other compatible memory types.
            for(;;)
            {
                // Remove the failed memTypeIndex from the list of possibilities.
                memoryTypeBits &= ~(1u << memTypeIndex);
                // Find an alternative memTypeIndex.
                res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                if(res == VK_SUCCESS)
                {
                    alignmentForMemType = VMA_MAX(
                        vkMemReq.alignment,
                        GetMemoryTypeMinAlignment(memTypeIndex));

                    res = AllocateMemoryOfType(
                        vkMemReq.size,
                        alignmentForMemType,
                        requiresDedicatedAllocation || prefersDedicatedAllocation,
                        dedicatedBuffer,
                        dedicatedBufferUsage,
                        dedicatedImage,
                        createInfo,
                        memTypeIndex,
                        suballocType,
                        allocationCount,
                        pAllocations);
                    if(res == VK_SUCCESS)
                    {
                        return res;
                    }
                }
                // No other matching memory type index could be found.
                else
                {
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
        else
        {
            return res;
        }
    }
}
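/*
A minimal sketch of driving this path through the public API (assumes an initialized
`allocator` and a valid `device`/`buffer`; all other names are local to this example):

    VkMemoryRequirements memReq = {};
    vkGetBufferMemoryRequirements(device, buffer, &memReq);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocation allocation = VK_NULL_HANDLE;
    VkResult res = vmaAllocateMemory(allocator, &memReq, &allocCreateInfo, &allocation, VMA_NULL);
    // On success, bind with vmaBindBufferMemory(allocator, allocation, buffer).
*/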
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            // Do this regardless of whether the allocation is lost.
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}
VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and kept only for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
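/*
A minimal usage sketch (assumes an initialized `allocator`):

    VmaStats stats;
    vmaCalculateStats(allocator, &stats);
    printf("Used: %llu B, unused: %llu B\n",
        (unsigned long long)stats.total.usedBytes,
        (unsigned long long)stats.total.unusedBytes);

Note this traverses all block vectors, custom pools, and dedicated allocations under
their mutexes, so it is intended for diagnostics rather than per-frame use.
*/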
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // Have to take MIN with heap size because an explicit HeapSizeLimit is included in it.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of the mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
        }
    }
}
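/*
A minimal sketch of polling the budget from application code (assumes an initialized
`allocator`; `newAllocSize` and the heap index are placeholders for this example):

    VmaBudget budget[VK_MAX_MEMORY_HEAPS] = {};
    vmaGetBudget(allocator, budget);
    if(budget[0].usage + newAllocSize > budget[0].budget)
    {
        // Over budget: consider freeing or downsizing resources before allocating more.
    }

When VK_EXT_memory_budget is unavailable, the budget falls back to the 80%-of-heap-size
heuristic computed above, so treat the numbers as estimates.
*/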
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats, info.flags);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}

VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}

VkResult VmaAllocator_T::DefragmentationPassBegin(
    VmaDefragmentationPassInfo* pInfo,
    VmaDefragmentationContext context)
{
    return context->DefragmentPassBegin(pInfo);
}

VkResult VmaAllocator_T::DefragmentationPassEnd(
    VmaDefragmentationContext context)
{
    return context->DefragmentPassEnd();
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        /*
        Warning: This is a carefully designed algorithm.
        Do not modify unless you really know what you're doing :)
        */
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}
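/*
For allocations created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT, the intended
per-frame pattern is a sketch like the following (assumes `allocator`, `allocation`,
and a frame counter maintained by the application; illustration only):

    vmaSetCurrentFrameIndex(allocator, frameIndex);
    if(!vmaTouchAllocation(allocator, allocation))
    {
        // The allocation is lost: destroy the resource and recreate it.
    }

TouchAllocation() is the lock-free core of that check: it advances the allocation's
last-use frame index via compare-exchange and reports whether it is still alive.
*/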
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    // Memory type index out of range or forbidden.
    if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() ||
        ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}
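/*
A minimal sketch of creating a custom pool via the public API (assumes an initialized
`allocator`; the buffer parameters are placeholders for this example):

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 1024;
    bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    VmaPoolCreateInfo poolCreateInfo = {};
    vmaFindMemoryTypeIndexForBufferInfo(allocator, &bufCreateInfo, &allocCreateInfo,
        &poolCreateInfo.memoryTypeIndex);
    poolCreateInfo.blockSize = 16ull * 1024 * 1024;
    poolCreateInfo.maxBlockCount = 4;

    VmaPool pool = VK_NULL_HANDLE;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
*/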
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}

void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}
VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}

VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData);
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
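/*
Both AllocateVulkanMemory() and FreeVulkanMemory() invoke the optional informative
callbacks around every real vkAllocateMemory/vkFreeMemory call. A minimal sketch of
installing them, assuming the callback signature of this header version (the callback
names are local to this example):

    static void VKAPI_CALL OnAlloc(VmaAllocator alloc, uint32_t memType,
        VkDeviceMemory mem, VkDeviceSize size, void* pUserData)
    {
        // e.g. log or count device memory blocks.
    }
    static void VKAPI_CALL OnFree(VmaAllocator alloc, uint32_t memType,
        VkDeviceMemory mem, VkDeviceSize size, void* pUserData)
    {
    }

    VmaDeviceMemoryCallbacks cb = {};
    cb.pfnAllocate = OnAlloc;
    cb.pfnFree = OnFree;
    // Then set allocatorCreateInfo.pDeviceMemoryCallbacks = &cb before vmaCreateAllocator().
*/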
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}
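/*
The pNext parameter of these two helpers only reaches the driver through
vkBindBufferMemory2KHR / vkBindImageMemory2KHR, which require VK_KHR_bind_memory2 (or
Vulkan 1.1). A sketch of how a caller reaches this path through the public API
(illustration only; assumes `allocator`, `allocation`, `buffer`):

    // pNext could point e.g. at a VkBindBufferMemoryDeviceGroupInfo when device
    // groups are in use; VMA_NULL here binds without an extension chain.
    VkResult res = vmaBindBufferMemory2(allocator, allocation, 0, buffer, VMA_NULL);

Passing a non-null pNext without the extension enabled returns
VK_ERROR_EXTENSION_NOT_PRESENT instead of silently dropping the chain.
*/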
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char *pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
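/*
A minimal sketch of the mapping contract from application code (assumes `allocator`
and a host-visible `allocation`; `myData`/`myDataSize` are placeholders):

    void* pData = VMA_NULL;
    VkResult res = vmaMapMemory(allocator, allocation, &pData);
    if(res == VK_SUCCESS)
    {
        memcpy(pData, myData, myDataSize);
        vmaUnmapMemory(allocator, allocation);
        // For non-HOST_COHERENT memory, also call vmaFlushAllocation().
    }

Map() above already offsets the returned pointer by the allocation's offset inside
its block, so the caller always writes starting at byte 0 of its own allocation.
*/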
VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    VkResult res = VK_SUCCESS;

    VkMappedMemoryRange memRange = {};
    if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
    {
        switch(op)
        {
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
    return res;
}
VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets, const VkDeviceSize* sizes,
    VMA_CACHE_OPERATION op)
{
    typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
    typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
    RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));

    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation alloc = allocations[allocIndex];
        const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
        const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
        VkMappedMemoryRange newRange;
        if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
        {
            ranges.push_back(newRange);
        }
    }

    VkResult res = VK_SUCCESS;
    if(!ranges.empty())
    {
        switch(op)
        {
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
            break;
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
    return res;
}
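/*
A minimal sketch of the batched variant from application code (assumes `allocator`
and three host-visible allocations a0..a2; illustration only):

    VmaAllocation allocs[3] = { a0, a1, a2 };
    // NULL offsets/sizes mean "each allocation from offset 0, VK_WHOLE_SIZE".
    VkResult res = vmaFlushAllocations(allocator, 3, allocs, VMA_NULL, VMA_NULL);

Collecting all ranges into one vkFlushMappedMemoryRanges call is the point of this
function: one Vulkan call instead of one per allocation.
*/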
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create buffer.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // Query for supported memory types.
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // Destroy buffer.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}
uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
{
    // Make sure memory information is already fetched.
    VMA_ASSERT(GetMemoryTypeCount() > 0);

    uint32_t memoryTypeBits = UINT32_MAX;

    if(!m_UseAmdDeviceCoherentMemory)
    {
        // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD.
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
            {
                memoryTypeBits &= ~(1u << memTypeIndex);
            }
        }
    }

    return memoryTypeBits;
}
bool VmaAllocator_T::GetFlushOrInvalidateRange(
    VmaAllocation allocation,
    VkDeviceSize offset, VkDeviceSize size,
    VkMappedMemoryRange& outRange) const
{
    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        const VkDeviceSize allocationSize = allocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
        outRange.pNext = VMA_NULL;
        outRange.memory = allocation->GetMemory();

        switch(allocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                outRange.size = allocationSize - outRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                outRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
                    allocationSize - outRange.offset);
            }
            break;
        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            // 1. Still within this allocation.
            outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                size = allocationSize - offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
            }
            outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);

            // 2. Adjust to whole block.
            const VkDeviceSize allocationOffset = allocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
            outRange.offset += allocationOffset;
            outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
            break;
        }
        default:
            VMA_ASSERT(0);
        }
        return true;
    }
    return false;
}
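/*
A worked example of the alignment arithmetic above, assuming
nonCoherentAtomSize = 64, offset = 100, size = 200 within a sufficiently large
allocation:

    outRange.offset = VmaAlignDown(100, 64) = 64
    outRange.size   = VmaAlignUp(200 + (100 - 64), 64) = VmaAlignUp(236, 64) = 256

So the resulting range [64, 320) covers the requested [100, 300) and both ends land
on nonCoherentAtomSize boundaries, as the Vulkan spec requires for
vkFlushMappedMemoryRanges / vkInvalidateMappedMemoryRanges.
*/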
#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    VmaPnextChainPushFront(&memProps, &budgetProps);

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();

            // Some bugged drivers return a budget of 0 or one larger than the heap size; clamp.
            if(m_Budget.m_VulkanBudget[heapIndex] == 0)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
            }
            else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
            }
            if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
            {
                m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
            }
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);

    VMA_DEBUG_LOG("vmaCreateAllocator");

    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo)
{
    VMA_ASSERT(allocator && pAllocatorInfo);
    pAllocatorInfo->instance = allocator->m_hInstance;
    pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice();
    pAllocatorInfo->device = allocator->m_hDevice;
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
                    {
                        json.WriteString("PROTECTED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_UNCACHED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
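/*
A minimal sketch of dumping the JSON statistics (assumes an initialized `allocator`):

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE -> include detailed map.
    printf("%s\n", statsString);
    vmaFreeStatsString(allocator, statsString);

The returned string must be released with vmaFreeStatsString() because it is allocated
with the allocator's own CPU allocation callbacks.
*/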
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    // Avoid DEVICE_COHERENT memory unless it was explicitly requested.
    if(((pAllocationCreateInfo->requiredFlags | pAllocationCreateInfo->preferredFlags) &
        (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
    {
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to the memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains all requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Cost = number of preferredFlags bits missing plus notPreferredFlags bits present.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember the memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}
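/*
A minimal usage sketch (assumes an initialized `allocator`; the create-infos are
placeholders for this example):

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
        allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);

The temporary VkBuffer created inside exists only long enough to query
vkGetBufferMemoryRequirements and is destroyed before the function returns.
*/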
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool && ppName);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(
    VmaAllocator allocator,
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets,
    const VkDeviceSize* sizes)
{
    VMA_ASSERT(allocator);

    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocations);

    VMA_DEBUG_LOG("vmaFlushAllocations");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        //TODO
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(
    VmaAllocator allocator,
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets,
    const VkDeviceSize* sizes)
{
    VMA_ASSERT(allocator);

    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocations);

    VMA_DEBUG_LOG("vmaInvalidateAllocations");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        //TODO
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo *pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using the new one.

    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // info2.flags, maxGpuAllocationsToMove, maxGpuBytesToMove, commandBuffer deliberately left zero.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext *pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context,
    VmaDefragmentationPassInfo* pInfo)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(pInfo);

    VMA_DEBUG_LOG("vmaBeginDefragmentationPass");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        pInfo->moveCount = 0;
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassBegin(pInfo, context);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaEndDefragmentationPass");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassEnd(context);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
        !allocator->m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            pBufferCreateInfo->usage, // dedicatedBufferUsage
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
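/*
A minimal end-to-end sketch (assumes an initialized `allocator`; illustration only):

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
        &buffer, &allocation, VMA_NULL);
    // ... use the buffer ...
    vmaDestroyBuffer(allocator, buffer, allocation);
*/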
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            UINT32_MAX, // dedicatedBufferUsage
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION