#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H
#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif

#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif
#if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
extern PFN_vkAllocateMemory vkAllocateMemory;
extern PFN_vkFreeMemory vkFreeMemory;
extern PFN_vkMapMemory vkMapMemory;
extern PFN_vkUnmapMemory vkUnmapMemory;
extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
extern PFN_vkBindBufferMemory vkBindBufferMemory;
extern PFN_vkBindImageMemory vkBindImageMemory;
extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
extern PFN_vkCreateBuffer vkCreateBuffer;
extern PFN_vkDestroyBuffer vkDestroyBuffer;
extern PFN_vkCreateImage vkCreateImage;
extern PFN_vkDestroyImage vkDestroyImage;
extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
#if VMA_VULKAN_VERSION >= 1001000
    extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
    extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
    extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
    extern PFN_vkBindImageMemory2 vkBindImageMemory2;
    extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
#endif // #if VMA_VULKAN_VERSION >= 1001000
#endif // #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
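/*
Illustrative sketch (added comment, not part of the original source): with
VK_NO_PROTOTYPES on Android, the entry points declared above are expected to be
defined by the application and fetched once at startup, e.g.:

    // `instance` and `device` are assumed to be created elsewhere;
    // vkGetInstanceProcAddr itself typically comes from the loader (dlsym).
    vkGetDeviceProcAddr = (PFN_vkGetDeviceProcAddr)vkGetInstanceProcAddr(instance, "vkGetDeviceProcAddr");
    vkAllocateMemory = (PFN_vkAllocateMemory)vkGetDeviceProcAddr(device, "vkAllocateMemory");
    // ...and likewise for the remaining functions.
*/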
#include <vulkan/vulkan.h>

#if !defined(VMA_VULKAN_VERSION)
    #if defined(VK_VERSION_1_2)
        #define VMA_VULKAN_VERSION 1002000
    #elif defined(VK_VERSION_1_1)
        #define VMA_VULKAN_VERSION 1001000
    #else
        #define VMA_VULKAN_VERSION 1000000
    #endif
#endif

#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif

#if !defined(VMA_BUFFER_DEVICE_ADDRESS)
    #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
        #define VMA_BUFFER_DEVICE_ADDRESS 1
    #else
        #define VMA_BUFFER_DEVICE_ADDRESS 0
    #endif
#endif
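/*
Illustrative sketch (added comment, not from the original source): every VMA_*
feature toggle above is guarded by #if !defined(...), so it can be forced from
the build system or before including this header, e.g.:

    #define VMA_DEDICATED_ALLOCATION 0 // opt out even if the extension macros are present
    #define VMA_VULKAN_VERSION 1000000 // target plain Vulkan 1.0
    #include "vk_mem_alloc.h"
*/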
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif

#ifndef VMA_LEN_IF_NOT_NULL
    #define VMA_LEN_IF_NOT_NULL(len)
#endif

#ifndef VMA_NULLABLE
    #ifdef __clang__
        #define VMA_NULLABLE _Nullable
    #else
        #define VMA_NULLABLE
    #endif
#endif

#ifndef VMA_NOT_NULL
    #ifdef __clang__
        #define VMA_NOT_NULL _Nonnull
    #else
        #define VMA_NOT_NULL
    #endif
#endif

#ifndef VMA_NOT_NULL_NON_DISPATCHABLE
    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
        #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
    #else
        #define VMA_NOT_NULL_NON_DISPATCHABLE
    #endif
#endif

#ifndef VMA_NULLABLE_NON_DISPATCHABLE
    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
        #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
    #else
        #define VMA_NULLABLE_NON_DISPATCHABLE
    #endif
#endif
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryType,
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
    VkDeviceSize size,
    void* VMA_NULLABLE pUserData);

/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryType,
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
    VkDeviceSize size,
    void* VMA_NULLABLE pUserData);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
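/*
Illustrative sketch (added comment, not from the original source): when static
linking of these entry points is disabled, the VmaVulkanFunctions members above
are typically filled by hand, e.g.:

    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkGetBufferMemoryRequirements2KHR =
        (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(device, "vkGetBufferMemoryRequirements2KHR");
    // ...fill the remaining members the same way, then pass the struct via
    // VmaAllocatorCreateInfo::pVulkanFunctions.
*/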
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pHeapSizeLimit;
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t frameIndex);
#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as a null-terminated string in JSON format.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator VMA_NOT_NULL allocator,
    char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator VMA_NOT_NULL allocator,
    char* VMA_NULLABLE pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
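/*
Illustrative usage sketch (added comment, not from the original source),
assuming a valid VmaAllocator `allocator`:

    char* statsJson = VMA_NULL;
    vmaBuildStatsString(allocator, &statsJson, VK_TRUE); // VK_TRUE = detailed map
    // ...log or save the JSON document...
    vmaFreeStatsString(allocator, statsJson); // must be freed by the matching call
*/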
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);
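/*
Illustrative usage sketch (added comment, not from the original source): picking
a memory type for a hypothetical uniform buffer, e.g. before creating a custom pool:

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    uint32_t memTypeIndex = 0;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
        allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);
*/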
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator VMA_NOT_NULL allocator,
    const VmaPoolCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    size_t* VMA_NULLABLE pLostAllocationCount);

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    const char* VMA_NULLABLE * VMA_NOT_NULL ppName);

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    const char* VMA_NULLABLE pName);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
    const VmaAllocationCreateInfo* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pCreateInfo,
    size_t allocationCount,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
    VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);
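/*
Illustrative sketch (added comment, not from the original source):
vmaAllocateMemoryPages makes `allocationCount` allocations in one call; every
array tagged VMA_LEN_IF_NOT_NULL(allocationCount) must have that many elements:

    VkMemoryRequirements memReqs[8];           // one per allocation, filled elsewhere
    VmaAllocationCreateInfo createInfos[8] = {};
    VmaAllocation allocations[8] = {};
    VkResult res = vmaAllocateMemoryPages(
        allocator, memReqs, createInfos, 8, allocations, VMA_NULL);
*/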
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator VMA_NOT_NULL allocator,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator VMA_NOT_NULL allocator,
    size_t allocationCount,
    const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize newSize);
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    void* VMA_NULLABLE pUserData);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    void* VMA_NULLABLE * VMA_NOT_NULL ppData);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize offset,
    VkDeviceSize size);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize offset,
    VkDeviceSize size);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t allocationCount,
    const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t allocationCount,
    const VmaAllocation VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
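/*
Illustrative sketch (added comment, not from the original source): flushing
several mapped allocations in one call; null `offsets`/`sizes` are interpreted
as offset 0 and VK_WHOLE_SIZE for every allocation:

    VmaAllocation allocs[2] = { allocA, allocB }; // assumed to exist
    vmaFlushAllocations(allocator, 2, allocs, VMA_NULL, VMA_NULL);
*/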
    const VmaPool VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(poolCount) pPools;

    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory;
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator VMA_NOT_NULL allocator,
    const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
    size_t allocationCount,
    VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
    const VmaDefragmentationInfo* VMA_NULLABLE pDefragmentationInfo,
    VmaDefragmentationStats* VMA_NULLABLE pDefragmentationStats);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
    const void* VMA_NULLABLE pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
    const void* VMA_NULLABLE pNext);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
    VmaAllocation VMA_NULLABLE allocation);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator VMA_NOT_NULL allocator,
    VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
    VmaAllocation VMA_NULLABLE allocation);

#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
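/*
Illustrative usage sketch (added comment, not part of the original header): the
canonical create/destroy pair for a buffer together with its memory, assuming a
valid allocator:

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer = VK_NULL_HANDLE;
    VmaAllocation allocation = VMA_NULL;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buffer, &allocation, VMA_NULL);
    // ...
    vmaDestroyBuffer(allocator, buffer, allocation);
*/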
// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
    #define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

#if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
    #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
#endif
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio 2015 Update 2 and later provide std::shared_mutex while still
    // reporting __cplusplus as C++98/03; detect that case through _MSVC_LANG.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif

#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <algorithm>
#include <mutex>
#ifndef VMA_NULL
    // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    #define VMA_NULL nullptr
#endif
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib>
void *vma_aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib>

#if defined(__APPLE__)
#include <AvailabilityMacros.h>
#endif

void *vma_aligned_alloc(size_t alignment, size_t size)
{
#if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0))
#if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0
    // aligned_alloc() is only declared by the macOS 11.0 / iOS 14 SDKs even
    // though the function is available from macOS 10.15 / iOS 13, which is why
    // the runtime __builtin_available check below differs from the SDK check above.
    if (__builtin_available(macOS 10.15, iOS 13, *))
        return aligned_alloc(alignment, size);
#endif
#endif
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#elif defined(_WIN32)
void *vma_aligned_alloc(size_t alignment, size_t size)
{
    return _aligned_malloc(size, alignment);
}
#else
void *vma_aligned_alloc(size_t alignment, size_t size)
{
    return aligned_alloc(alignment, size);
}
#endif
#ifndef VMA_ASSERT
    #ifdef NDEBUG
        #define VMA_ASSERT(expr)
    #else
        #define VMA_ASSERT(expr) assert(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef NDEBUG
        #define VMA_HEAVY_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #endif
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif
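/*
Illustrative override (added comment, not from the original source):
VMA_DEBUG_LOG is a no-op by default and can be redirected before the
implementation is compiled, e.g.:

    #define VMA_DEBUG_LOG(format, ...) do { \
        printf(format, __VA_ARGS__); \
        printf("\n"); \
    } while(false)
*/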
#if VMA_STATS_STRING_ENABLED

static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
        bool TryLock() { return m_Mutex.try_lock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            bool TryLockRead() { return m_Mutex.try_lock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
            bool TryLockWrite() { return m_Mutex.try_lock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI. Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
            bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: use a normal mutex for both reads and writes.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            bool TryLockRead() { return m_Mutex.TryLock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
            bool TryLockWrite() { return m_Mutex.TryLock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation will have its own memory block. Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes. Set to more than 1 for debugging purposes only; must be a power of two.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes. Set nonzero for debugging purposes only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define this macro to 1 to automatically fill new allocations and destroyed allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define this macro to 1 together with non-zero VMA_DEBUG_MARGIN to write magic values
    // into the margins and validate them in vmaCheckCorruption().
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif
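/*
Illustrative configuration sketch (added comment, not from the original source):
margins and corruption detection work together and both must be set before the
implementation is compiled:

    #define VMA_DEBUG_MARGIN 16            // bytes of margin around each allocation
    #define VMA_DEBUG_DETECT_CORRUPTION 1  // write + validate magic values in the margins
    #define VMA_IMPLEMENTATION
    #include "vk_mem_alloc.h"
*/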
#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Define this macro to 1 to make the library use one single global mutex instead of fine-grained locking, for debugging only.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value of VkPhysicalDeviceLimits::bufferImageGranularity assumed by the library.
    // Set to more than 1 for debugging purposes only; must be a power of two.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif
#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

// Copy of some Vulkan definitions so we don't need to check their existence just to handle few constants.
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
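// Worked example for VmaCountBitsSet above (added comment): for v = 0b1101 the
// first step yields 0b1001, i.e. per-pair bit counts (pair "11" -> "10" = 2,
// pair "01" -> "01" = 1); each following step folds neighbouring 2-, 4-, 8- and
// 16-bit partial sums together, so the final result is 3.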
// Returns true if given number is a power of two.
// T must be unsigned integer number or signed integer but always nonnegative.
// For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}

// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    return (val + alignment - 1) & ~(alignment - 1);
}

// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T alignment)
{
    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
    return val & ~(alignment - 1);
}
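// Worked example for the alignment helpers above (added comment): with
// alignment = 8 (a power of two), ~(8 - 1) clears the low 3 bits, so
// VmaAlignUp(13, 8) = (13 + 7) & ~7 = 16 and VmaAlignDown(13, 8) = 13 & ~7 = 8.
// The VMA_HEAVY_ASSERT(VmaIsPow2(alignment)) guards this bit trick: it is only
// valid for power-of-two alignments.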
// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}
#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
// Returns true if the two given memory regions end and begin on the same
// physical page of the given size (pageSize must be a power of two).
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
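// Worked example for VmaBlocksOnSamePage above (added comment): with
// pageSize = 4096, a resource A ending at byte 4095 and a resource B starting at
// byte 4096 land on different pages (0 vs 4096), so the function returns false;
// if A ended at byte 3999 and B started at byte 4000, both offsets round down to
// page 0 and it returns true.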
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
// Returns true if the two suballocation types could conflict and must be kept
// further apart than VkPhysicalDeviceLimits::bufferImageGranularity.
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#endif
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the free suballocation collection.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to the first element that is greater
or equal to (key), according to comparison (cmp). Cmp should return true if the
first argument is less than the second. The returned value is the found element,
if present, or the place where a new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}

template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
/*
Returns true if all pointers in the array are not-null and unique.
Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
T must be a pointer type, e.g. VmaAllocation, VmaPool.
*/
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}

template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
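/*
Illustrative sketch (added comment, not from the original source): pushing an
extension struct onto a Vulkan pNext chain with the helper above:

    VkMemoryRequirements2 memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 };
    VkMemoryDedicatedRequirements dedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS };
    VmaPnextChainPushFront(&memReq2, &dedicatedReq); // dedicatedReq now heads the chain
*/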
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    void* result = VMA_NULL;
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        result = (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
    VMA_ASSERT(result != VMA_NULL && "CPU memory allocation failed.");
    return result;
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}

static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
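/*
Illustrative sketch (added comment, not from the original source):
VmaStlAllocator adapts VkAllocationCallbacks to the minimal STL allocator
interface, so the internal containers can route every heap allocation through
the user's callbacks:

    const VkAllocationCallbacks* pCallbacks = VMA_NULL; // or user-provided callbacks
    VmaVector< uint32_t, VmaStlAllocator<uint32_t> > indices{ VmaStlAllocator<uint32_t>(pCallbacks) };
    indices.push_back(42u);
*/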
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR
/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
    // value is unused.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }
    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }
    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }
    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};

template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
/* Class with interface similar to a subset of std::vector, using a static array
m_StaticArray while the element count is at most N and switching to a dynamic
VmaVector above that. */
template<typename T, typename AllocatorT, size_t N>
class VmaSmallVector
{
public:
    typedef T value_type;

    VmaSmallVector(const AllocatorT& allocator) :
        m_Count(0),
        m_DynamicArray(allocator)
    {
    }
    VmaSmallVector(size_t count, const AllocatorT& allocator) :
        m_Count(count),
        m_DynamicArray(count > N ? count : 0, allocator)
    {
    }
    template<typename SrcT, typename SrcAllocatorT, size_t SrcN>
    VmaSmallVector(const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) = delete;
    template<typename SrcT, typename SrcAllocatorT, size_t SrcN>
    VmaSmallVector<T, AllocatorT, N>& operator=(const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) = delete;

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
    const T* data() const { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return data()[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return data()[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return data()[m_Count - 1];
    }
    void resize(size_t newCount, bool freeMemory = false)
    {
        if(newCount > N && m_Count > N)
        {
            // Any direction, staying in m_DynamicArray.
            m_DynamicArray.resize(newCount, freeMemory);
        }
        else if(newCount > N && m_Count <= N)
        {
            // Growing, moving from m_StaticArray to m_DynamicArray.
            m_DynamicArray.resize(newCount, freeMemory);
            if(m_Count > 0)
            {
                memcpy(m_DynamicArray.data(), m_StaticArray, m_Count * sizeof(T));
            }
        }
        else if(newCount <= N && m_Count > N)
        {
            // Shrinking, moving from m_DynamicArray to m_StaticArray.
            if(newCount > 0)
            {
                memcpy(m_StaticArray, m_DynamicArray.data(), newCount * sizeof(T));
            }
            m_DynamicArray.resize(0, freeMemory);
        }
        // else: any direction, staying in m_StaticArray - nothing to do here.
        m_Count = newCount;
    }
    void clear(bool freeMemory = false)
    {
        m_DynamicArray.clear(freeMemory);
        m_Count = 0;
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        T* const dataPtr = data();
        if(index < oldCount)
        {
            memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) * sizeof(T));
        }
        dataPtr[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            T* const dataPtr = data();
            memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        data()[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return data(); }
    iterator end() { return data() + m_Count; }

private:
    size_t m_Count;
    T m_StaticArray[N]; // Used when m_Count <= N.
    VmaVector<T, AllocatorT> m_DynamicArray; // Used when m_Count > N.
};
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. The number of elements that can be allocated is not bounded because
the allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    template<typename... Types> T* Alloc(Types... args);
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}
template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: use the first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
            return result;
        }
    }

    // No block has a free item: create a new block and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
    return result;
}
template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union through memcpy to avoid aliasing issues.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in the address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            ptr->~T(); // Explicit destructor call.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
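/*
Design note with usage sketch (added comment, not from the original source):
blocks grow geometrically (Capacity * 3 / 2) and free slots inside a block form
an intrusive singly-linked list through Item::NextFreeIndex, so Alloc/Free never
touch the system heap once a block exists:

    VmaPoolAllocator<int> pool(VMA_NULL, 16); // first block holds 16 items
    int* p = pool.Alloc(7);                   // placement-new with argument 7
    pool.Free(p);                             // returns the slot to the free list
*/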
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear: returning every item to m_ItemAllocator
    // as free would be unnecessary work, since the allocator is destroyed anyway.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}
template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };
    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };
    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector. */
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}
template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}
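/*
Illustrative sketch (added comment, not from the original source): VmaMap keeps
its pairs in a vector sorted by key and binary-searches it, trading O(n) inserts
for cache-friendly lookups on the small maps used internally:

    VmaMap<uint32_t, float> map{ VmaStlAllocator<VmaPair<uint32_t, float>>(VMA_NULL) };
    map.insert(VmaPair<uint32_t, float>(5, 1.5f));
    VmaPair<uint32_t, float>* it = map.find(5); // it->second == 1.5f, or end() if absent
*/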
#endif // #if VMA_USE_STL_UNORDERED_MAP
class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };

struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment{1},
        m_Size{0},
        m_pUserData{VMA_NULL},
        m_LastUseFrameIndex{currentFrameIndex},
        m_MemoryTypeIndex{0},
        m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
        m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
        m_MapCount{0},
        m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
    {
#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }
    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }
    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }
    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
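    /*
    Added note with a sketch (assumption based on the surrounding code, not from
    the original source): the lost-allocation machinery publishes "last use"
    through an atomic so several threads may touch the same allocation; a typical
    touch loop retries the weak CAS until the stored frame index catches up:

        uint32_t expected = alloc->GetLastUseFrameIndex();
        while(expected < currentFrameIndex &&
              !alloc->CompareExchangeLastUseFrameIndex(expected, currentFrameIndex))
        {
            // `expected` was reloaded by the failed CAS; try again.
        }
    */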
    /*
    If the frame index of last use plus frameInUseCount is older than the current
    frame, makes the allocation lost (LastUseFrameIndex = VMA_FRAME_INDEX_LOST)
    and returns true; otherwise returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif
private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when the allocation was created with the MAPPED flag;
    // bits with mask 0x7F are the reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif
};
/*
Represents a region of a VmaDeviceMemoryBlock that is either assigned and
returned as allocated memory or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparators for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};
typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by the "Linear" algorithm:
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

/*
Parameters of a planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
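// Worked example for CalcCost above (added comment): with
// VMA_LOST_ALLOCATION_COST = 1048576, a request that would overwrite 2 lost-able
// allocations summing to 4096 bytes costs 4096 + 2 * 1048576 = 2101248, so
// making allocations lost is heavily penalized compared to consuming free space.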
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only a single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;
#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for a suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true; otherwise returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes the actual allocation based on a request. The request must already be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees the suballocation assigned to the given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;
protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};

#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    // For defragmentation:
    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;
6371 friend class VmaDefragmentationAlgorithm_Generic;
6372 friend class VmaDefragmentationAlgorithm_Fast;
6374 uint32_t m_FreeCount;
6375 VkDeviceSize m_SumFreeSize;
6376 VmaSuballocationList m_Suballocations;
6379 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
6381 bool ValidateFreeSuballocationList()
const;
6385 bool CheckAllocation(
6386 uint32_t currentFrameIndex,
6387 uint32_t frameInUseCount,
6388 VkDeviceSize bufferImageGranularity,
6389 VkDeviceSize allocSize,
6390 VkDeviceSize allocAlignment,
6391 VmaSuballocationType allocType,
6392 VmaSuballocationList::const_iterator suballocItem,
6393 bool canMakeOtherLost,
6394 VkDeviceSize* pOffset,
6395 size_t* itemsToMakeLostCount,
6396 VkDeviceSize* pSumFreeSize,
6397 VkDeviceSize* pSumItemSize)
const;
6399 void MergeFreeWithNext(VmaSuballocationList::iterator item);
6403 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
6406 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
6409 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
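/*
Note on m_FreeSuballocationsBySize: it holds iterators to the free
suballocations whose size is at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER,
kept sorted by size ascending, so a best-fit search reduces to a binary search.
A minimal sketch of the lookup as used later in CreateAllocationRequest
(local name `size` is hypothetical):

    // Find the smallest registered free range that can hold `size` bytes.
    VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
        m_FreeSuballocationsBySize.data(),
        m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
        size,
        VmaSuballocationItemSizeLess());
*/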
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        // Suballocations in 2nd vector are created later than the ones in 1st,
        // but they all have smaller offset.
        SECOND_VECTOR_RING_BUFFER,
        // Suballocations in 2nd vector are upper side of double stack.
        // They all have offsets higher than those in 1st vector.
        // Top of this stack means smaller offsets, but higher indices in this vector.
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
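/*
Informal layout sketch for VmaBlockMetadata_Linear (1 = items of the 1st
vector, 2 = items of the 2nd vector):

    SECOND_VECTOR_EMPTY        - only the 1st vector, used as a stack:
                                 |1111111111          |
    SECOND_VECTOR_RING_BUFFER  - the 2nd vector wrapped around to the start:
                                 |2222      1111111111|
    SECOND_VECTOR_DOUBLE_STACK - the 2nd vector grows down from the end
                                 (upperAddress allocations):
                                 |1111111111      2222|

m_1stVectorIndex flips which of m_Suballocations0/1 currently plays the role
of the 1st vector, so swapping the two vectors after compaction is O(1).
*/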
/*
- GetSize() is the original size of allocated memory block.
- m_UsableSize is this size aligned down to a power of two.
  All allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them.
  It is reported as separate, unused range, not available for allocations.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    struct
    {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    // node->type must be FREE; node->free.prev, next can be undefined.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    // node->type must be FREE; node->free.prev, next stay untouched.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
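/*
Worked example (numbers hypothetical, assuming a 256 MiB usable size):
LevelToNodeSize(level) is m_UsableSize >> level, so level 0 = 256 MiB,
level 1 = 128 MiB, level 2 = 64 MiB, and so on. AllocSizeToLevel picks the
deepest level whose node size still fits the request, so a 48 MiB request
lands in a 64 MiB node at level 2 and leaves 16 MiB of internal
fragmentation - the price the buddy algorithm pays for O(log N) alloc/free
and trivial coalescing of buddy pairs. MIN_NODE_SIZE = 32 and
MAX_LEVELS = 30 together cap how deep the tree can go.
*/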
/*
Represents a single block of device memory (VkDeviceMemory) with all the
data about its regions (aka suballocations, VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    VkResult CheckCorruption(VmaAllocator hAllocator);

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        const VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        const VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it is not used by multiple threads
    simultaneously, e.g. vkMapMemory, vkBindBufferMemory.
    Also protects m_MapCount, m_pMappedData.
    Allocations, deallocations, any change in m_pMetadata is protected by
    parent's VmaBlockVector::m_Mutex.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
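/*
Illustrative sketch (not part of the library): Map/Unmap are reference-counted
per block, so several allocations in one block can be "mapped" concurrently
while vkMapMemory is actually called only once. Hypothetical usage by the
block's owner:

    void* pData = VMA_NULL;
    if(block->Map(hAllocator, 1, &pData) == VK_SUCCESS)
    {
        // pData points to the beginning of the whole block; add the
        // suballocation's offset before reading or writing.
        block->Unmap(hAllocator, 1);
    }
*/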
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};
struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaDeviceMemoryBlock* pSrcBlock;
    VmaDeviceMemoryBlock* pDstBlock;
};

class VmaDefragmentationAlgorithm;
/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a
specific Vulkan memory type.

Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    void GetPoolStats(VmaPoolStats* pStats);

    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void Free(const VmaAllocation hAllocation);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    // Saves results in pCtx->res.
    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        uint32_t flags,
        VmaDefragmentationStats* pStats);

    uint32_t ProcessDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves);

    void CommitDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationStats* pStats);

    ////////////////////////////////////////////////////////////////////////////////
    // To be used only while the m_Mutex is locked. Used during defragmentation.

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    /* There can be at most one allocation that is completely empty (except when
    minBlockCount > 0) - a hysteresis to avoid the pessimistic case of alternating
    creation and destruction of a VkDeviceMemory. */
    bool m_HasEmptyBlock;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    void UpdateHasEmptyBlock();
};
struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo,
        VkDeviceSize preferredBlockSize);
    ~VmaPool_T();

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
#endif

private:
    uint32_t m_Id;
    char* m_Name;
};
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        bool freeOldAllocations);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                // Leave this structure for remaining empty space.
                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                // This structure becomes invalid.
                else
                {
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }

                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
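/*
Worked example for FreeSpaceDatabase::Fetch (values hypothetical): suppose a
registered free space has offset = 100 and size = 200 (range [100, 300)), and
we fetch with alignment = 64, size = 96. Then
dstOffset = VmaAlignUp(100, 64) = 128 and the allocation fits, because
128 + 96 = 224 <= 300. The space left after it is 300 - 224 = 76 bytes.
Since the consumed amount is alignmentPlusSize = (128 - 100) + 96 = 124, the
entry is shrunk to offset = 224, size = 76 if 76 is still at least
VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER; otherwise the entry is
invalidated. Note the 28 bytes of alignment padding in [100, 128) are dropped
from the database - it deliberately tracks only a few best candidates, not an
exact free map.
*/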
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    uint32_t flags;
    VkBuffer hBuffer;
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VkResult res;
    bool mutexLocked;
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
    uint32_t defragmentationMovesProcessed;
    uint32_t defragmentationMovesCommitted;
    bool hasDefragmentationPlan;

    VmaBlockVectorDefragmentationContext(
        VmaAllocator hAllocator,
        VmaPool hCustomPool, // Optional.
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

private:
    const VmaAllocator m_hAllocator;
    // Null if not from custom pool.
    const VmaPool m_hCustomPool;
    // Redundant, for convenience not to fetch from m_hCustomPool->m_BlockVector or m_hAllocator->m_pBlockVectors.
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    // Used between AddAllocation and defragmentation.
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        VmaAllocator hAllocator,
        uint32_t currFrameIndex,
        uint32_t flags,
        VmaDefragmentationStats* pStats);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, const VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        const VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    /*
    Returns:
    - VK_SUCCESS if succeeded and the object can be destroyed immediately.
    - VK_NOT_READY if succeeded but the object must remain alive until vmaDefragmentationEnd().
    - Negative value if error occurred and the object can be destroyed immediately.
    */
    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags);

    VkResult DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo);
    VkResult DefragmentPassEnd();

private:
    const VmaAllocator m_hAllocator;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    VmaDefragmentationStats* const m_pStats;

    VkDeviceSize m_MaxCpuBytesToMove;
    uint32_t m_MaxCpuAllocationsToMove;
    VkDeviceSize m_MaxGpuBytesToMove;
    uint32_t m_MaxGpuAllocationsToMove;

    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    VmaRecorder();
    VkResult Init(const VmaRecordSettings& settings, bool useMutex);
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled,
        bool deviceCoherentMemoryExtensionEnabled);
    ~VmaRecorder();

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationContext ctx);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        VmaDefragmentationContext ctx);
    void RecordSetPoolName(uint32_t frameIndex,
        VmaPool pool,
        const char* name);

private:
    struct CallParams
    {
        uint32_t threadId;
        double time;
    };

    class UserDataString
    {
    public:
        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
        const char* GetString() const { return m_Str; }

    private:
        char m_Buf[32];
        const char* m_Str;
    };

    bool m_UseMutex;
    VmaRecordFlags m_Flags;
    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
    void Flush();
};

#endif // #if VMA_RECORDING_ENABLED
/*
Thread-safe wrapper over VmaPoolAllocator free list, for allocation of
VmaAllocation_T objects.
*/
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    template<typename... Types> VmaAllocation Allocate(Types... args);
    void Free(VmaAllocation hAlloc);

private:
    VMA_MUTEX m_Mutex;
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
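/*
Worked example (numbers hypothetical): with VK_EXT_memory_budget, the current
heap usage between budget fetches can be estimated as

    estimatedUsage = m_VulkanUsage[heap]
                   + (m_BlockBytes[heap] - m_BlockBytesAtBudgetFetch[heap]);

i.e. the usage the driver last reported, corrected by how many block bytes this
allocator has created or freed since that fetch. If the driver reported
usage = 512 MiB when this allocator's blocks totaled 256 MiB, and blocks have
since grown to 320 MiB, the estimate is 512 + (320 - 256) = 576 MiB.
*/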
// Main allocator object.
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    bool m_UseMutex;
    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0).
    bool m_UseKhrBindMemory2; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0).
    bool m_UseExtMemoryBudget;
    bool m_UseAmdDeviceCoherentMemory;
    bool m_UseKhrBufferDeviceAddress;
    VkDevice m_hDevice;
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap,
    // so that vmaGetMemoryProperties can report it adjusted.
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo);
    ~VmaAllocator_T();

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

    uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage, // UINT32_MAX when unknown.
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationStats* pStats,
        VmaDefragmentationContext* pContext);
    VkResult DefragmentationEnd(
        VmaDefragmentationContext context);

    VkResult DefragmentationPassBegin(
        VmaDefragmentationPassInfo* pInfo,
        VmaDefragmentationContext context);
    VkResult DefragmentationPassEnd(
        VmaDefragmentationContext context);

    void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
    bool TouchAllocation(VmaAllocation hAllocation);

    VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
    void DestroyPool(VmaPool pool);
    void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    void CreateLostAllocation(VmaAllocation* pAllocation);

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR.
    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkBuffer buffer,
        const void* pNext);
    // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR.
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkImage image,
        const void* pNext);

    VkResult Map(VmaAllocation hAllocation, void** ppData);
    void Unmap(VmaAllocation hAllocation);

    VkResult BindBufferMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

    VkResult FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);
    VkResult FlushOrInvalidateAllocations(
        uint32_t allocationCount,
        const VmaAllocation* allocations,
        const VkDeviceSize* offsets, const VkDeviceSize* sizes,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    /*
    Returns bit mask of memory types that can support defragmentation on GPU as
    they support creation of required buffer for copy operations.
    */
    uint32_t GetGpuDefragmentationMemoryTypeBits();

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

    // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types.
    uint32_t m_GlobalMemoryTypeBits;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

    void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);

#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    void ImportVulkanFunctions_Static();
#endif

    void ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions);

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    void ImportVulkanFunctions_Dynamic();
#endif

    void ValidateVulkanFunctions();

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool withinBudget,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void FreeDedicatedMemory(const VmaAllocation allocation);

    /*
    Calculates and returns bit mask of memory types that can support
    defragmentation on GPU as they support creation of required buffer for
    copy operations.
    */
    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

    uint32_t CalculateGlobalMemoryTypeBits() const;

    bool GetFlushOrInvalidateRange(
        VmaAllocation allocation,
        VkDeviceSize offset, VkDeviceSize size,
        VkMappedMemoryRange& outRange) const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
////////////////////////////////////////////////////////////////////////////////
// Memory allocation #2 after VmaAllocator_T definition

static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
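/*
Usage sketch (hypothetical): these helpers route all internal host allocations
through the VkAllocationCallbacks the user passed at allocator creation, so the
whole library honors custom CPU allocators.

    // Allocate raw storage for 16 uint32_t, then destroy and free it.
    uint32_t* p = VmaAllocateArray<uint32_t>(hAllocator, 16);
    vma_delete_array(hAllocator, p, 16);
*/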
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char *p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char *p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
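/*
Usage sketch (hypothetical): building {"Name": "Block", "Size": 1024} with the
writer. Inside an object, keys and values alternate; BeginValue asserts that
every even-positioned value in an object is a string key.

    VmaStringBuilder sb(hAllocator);
    {
        VmaJsonWriter json(hAllocator->GetAllocationCallbacks(), sb);
        json.BeginObject();
        json.WriteString("Name");
        json.WriteString("Block");
        json.WriteString("Size");
        json.WriteNumber(1024u);
        json.EndObject();
    }
    // sb.GetData()/sb.GetLength() now hold the JSON text (not null-terminated).
*/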
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
        {
            ++mapRefCount;
        }
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you are doing :)
    */
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
                return true;
            }
        }
    }
}

#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}

void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata

VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Generic

VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}
bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations as calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations as calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in
    // m_FreeSuballocationsBySize, calculated from traversing their list.
    size_t freeSuballocationsToRegister = 0;
    // True if previously visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Margin required between allocations - every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // Margin required between allocations - previous allocation must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize
    // doesn't match expected one.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}
VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}
void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;

    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request:
    // early return false.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    // New algorithm, efficiently searching freeSuballocationsBySize.
    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    it,
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search starting from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.

        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}
9243 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9245 uint32_t lostAllocationCount = 0;
9246 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9247 it != m_Suballocations.end();
9250 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
9251 it->hAllocation->CanBecomeLost() &&
9252 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9254 it = FreeSuballocation(it);
9255 ++lostAllocationCount;
9258 return lostAllocationCount;
9261 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
9263 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
9264 it != m_Suballocations.end();
9267 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
9269 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
9271 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9272 return VK_ERROR_VALIDATION_FAILED_EXT;
9274 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
9276 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9277 return VK_ERROR_VALIDATION_FAILED_EXT;
9285 void VmaBlockMetadata_Generic::Alloc(
9286 const VmaAllocationRequest& request,
9287 VmaSuballocationType type,
9288 VkDeviceSize allocSize,
9291 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
9292 VMA_ASSERT(request.item != m_Suballocations.end());
9293 VmaSuballocation& suballoc = *request.item;
9295 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9297 VMA_ASSERT(request.offset >= suballoc.offset);
9298 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
9299 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
9300 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
9304 UnregisterFreeSuballocation(request.item);
9306 suballoc.offset = request.offset;
9307 suballoc.size = allocSize;
9308 suballoc.type = type;
9309 suballoc.hAllocation = hAllocation;
9314 VmaSuballocation paddingSuballoc = {};
9315 paddingSuballoc.offset = request.offset + allocSize;
9316 paddingSuballoc.size = paddingEnd;
9317 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9318 VmaSuballocationList::iterator next = request.item;
9320 const VmaSuballocationList::iterator paddingEndItem =
9321 m_Suballocations.insert(next, paddingSuballoc);
9322 RegisterFreeSuballocation(paddingEndItem);
9328 VmaSuballocation paddingSuballoc = {};
9329 paddingSuballoc.offset = request.offset - paddingBegin;
9330 paddingSuballoc.size = paddingBegin;
9331 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9332 const VmaSuballocationList::iterator paddingBeginItem =
9333 m_Suballocations.insert(request.item, paddingSuballoc);
9334 RegisterFreeSuballocation(paddingBeginItem);
9338 m_FreeCount = m_FreeCount - 1;
9339 if(paddingBegin > 0)
9347 m_SumFreeSize -= allocSize;
9350 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
9352 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9353 suballocItem != m_Suballocations.end();
9356 VmaSuballocation& suballoc = *suballocItem;
9357 if(suballoc.hAllocation == allocation)
9359 FreeSuballocation(suballocItem);
9360 VMA_HEAVY_ASSERT(Validate());
9364 VMA_ASSERT(0 &&
"Not found!");
9367 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
9369 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
9370 suballocItem != m_Suballocations.end();
9373 VmaSuballocation& suballoc = *suballocItem;
9374 if(suballoc.offset == offset)
9376 FreeSuballocation(suballocItem);
9380 VMA_ASSERT(0 &&
"Not found!");
9383 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const
9385 VkDeviceSize lastSize = 0;
9386 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
9388 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
9390 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
9391 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
9392 VMA_VALIDATE(it->size >= lastSize);
9393 lastSize = it->size;
9398 bool VmaBlockMetadata_Generic::CheckAllocation(
9399 uint32_t currentFrameIndex,
9400 uint32_t frameInUseCount,
9401 VkDeviceSize bufferImageGranularity,
9402 VkDeviceSize allocSize,
9403 VkDeviceSize allocAlignment,
9404 VmaSuballocationType allocType,
9405 VmaSuballocationList::const_iterator suballocItem,
9406 bool canMakeOtherLost,
9407 VkDeviceSize* pOffset,
9408 size_t* itemsToMakeLostCount,
9409 VkDeviceSize* pSumFreeSize,
9410 VkDeviceSize* pSumItemSize)
const
9412 VMA_ASSERT(allocSize > 0);
9413 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9414 VMA_ASSERT(suballocItem != m_Suballocations.cend());
9415 VMA_ASSERT(pOffset != VMA_NULL);
9417 *itemsToMakeLostCount = 0;
9421 if(canMakeOtherLost)
9423 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9425 *pSumFreeSize = suballocItem->size;
9429 if(suballocItem->hAllocation->CanBecomeLost() &&
9430 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9432 ++*itemsToMakeLostCount;
9433 *pSumItemSize = suballocItem->size;
9442 if(GetSize() - suballocItem->offset < allocSize)
9448 *pOffset = suballocItem->offset;
9451 if(VMA_DEBUG_MARGIN > 0)
9453 *pOffset += VMA_DEBUG_MARGIN;
9457 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
9461 if(bufferImageGranularity > 1)
9463 bool bufferImageGranularityConflict =
false;
9464 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
9465 while(prevSuballocItem != m_Suballocations.cbegin())
9468 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
9469 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
9471 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9473 bufferImageGranularityConflict =
true;
9481 if(bufferImageGranularityConflict)
9483 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
9489 if(*pOffset >= suballocItem->offset + suballocItem->size)
9495 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
9498 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
9500 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
9502 if(suballocItem->offset + totalSize > GetSize())
9509 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
9510 if(totalSize > suballocItem->size)
9512 VkDeviceSize remainingSize = totalSize - suballocItem->size;
9513 while(remainingSize > 0)
9516 if(lastSuballocItem == m_Suballocations.cend())
9520 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9522 *pSumFreeSize += lastSuballocItem->size;
9526 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
9527 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
9528 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9530 ++*itemsToMakeLostCount;
9531 *pSumItemSize += lastSuballocItem->size;
9538 remainingSize = (lastSuballocItem->size < remainingSize) ?
9539 remainingSize - lastSuballocItem->size : 0;
9545 if(bufferImageGranularity > 1)
9547 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
9549 while(nextSuballocItem != m_Suballocations.cend())
9551 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
9552 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9554 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9556 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
9557 if(nextSuballoc.hAllocation->CanBecomeLost() &&
9558 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9560 ++*itemsToMakeLostCount;
9579 const VmaSuballocation& suballoc = *suballocItem;
9580 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9582 *pSumFreeSize = suballoc.size;
9585 if(suballoc.size < allocSize)
9591 *pOffset = suballoc.offset;
9594 if(VMA_DEBUG_MARGIN > 0)
9596 *pOffset += VMA_DEBUG_MARGIN;
9600 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
9604 if(bufferImageGranularity > 1)
9606 bool bufferImageGranularityConflict =
false;
9607 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
9608 while(prevSuballocItem != m_Suballocations.cbegin())
9611 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
9612 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
9614 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9616 bufferImageGranularityConflict =
true;
9624 if(bufferImageGranularityConflict)
9626 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
9631 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
9634 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
9637 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
9644 if(bufferImageGranularity > 1)
9646 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
9648 while(nextSuballocItem != m_Suballocations.cend())
9650 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
9651 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9653 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9672 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
9674 VMA_ASSERT(item != m_Suballocations.end());
9675 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9677 VmaSuballocationList::iterator nextItem = item;
9679 VMA_ASSERT(nextItem != m_Suballocations.end());
9680 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
9682 item->size += nextItem->size;
9684 m_Suballocations.erase(nextItem);
9687 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
9690 VmaSuballocation& suballoc = *suballocItem;
9691 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9692 suballoc.hAllocation = VK_NULL_HANDLE;
9696 m_SumFreeSize += suballoc.size;
9699 bool mergeWithNext =
false;
9700 bool mergeWithPrev =
false;
9702 VmaSuballocationList::iterator nextItem = suballocItem;
9704 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
9706 mergeWithNext =
true;
9709 VmaSuballocationList::iterator prevItem = suballocItem;
9710 if(suballocItem != m_Suballocations.begin())
9713 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9715 mergeWithPrev =
true;
9721 UnregisterFreeSuballocation(nextItem);
9722 MergeFreeWithNext(suballocItem);
9727 UnregisterFreeSuballocation(prevItem);
9728 MergeFreeWithNext(prevItem);
9729 RegisterFreeSuballocation(prevItem);
9734 RegisterFreeSuballocation(suballocItem);
9735 return suballocItem;
9739 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
9741 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9742 VMA_ASSERT(item->size > 0);
9746 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
9748 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9750 if(m_FreeSuballocationsBySize.empty())
9752 m_FreeSuballocationsBySize.push_back(item);
9756 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
9764 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
9766 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9767 VMA_ASSERT(item->size > 0);
9771 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
9773 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9775 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
9776 m_FreeSuballocationsBySize.data(),
9777 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
9779 VmaSuballocationItemSizeLess());
9780 for(
size_t index = it - m_FreeSuballocationsBySize.data();
9781 index < m_FreeSuballocationsBySize.size();
9784 if(m_FreeSuballocationsBySize[index] == item)
9786 VmaVectorRemove(m_FreeSuballocationsBySize, index);
9789 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
9791 VMA_ASSERT(0 &&
"Not found.");
9797 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
9798 VkDeviceSize bufferImageGranularity,
9799 VmaSuballocationType& inOutPrevSuballocType)
const
9801 if(bufferImageGranularity == 1 || IsEmpty())
9806 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
9807 bool typeConflictFound =
false;
9808 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
9809 it != m_Suballocations.cend();
9812 const VmaSuballocationType suballocType = it->type;
9813 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
9815 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
9816 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
9818 typeConflictFound =
true;
9820 inOutPrevSuballocType = suballocType;
9824 return typeConflictFound || minAlignment >= bufferImageGranularity;
9830 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
9831 VmaBlockMetadata(hAllocator),
9833 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9834 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9835 m_1stVectorIndex(0),
9836 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
9837 m_1stNullItemsBeginCount(0),
9838 m_1stNullItemsMiddleCount(0),
9839 m_2ndNullItemsCount(0)
9843 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
9847 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
9849 VmaBlockMetadata::Init(size);
9850 m_SumFreeSize = size;
9853 bool VmaBlockMetadata_Linear::Validate()
const
9855 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9856 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9858 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
9859 VMA_VALIDATE(!suballocations1st.empty() ||
9860 suballocations2nd.empty() ||
9861 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
9863 if(!suballocations1st.empty())
9866 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
9868 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
9870 if(!suballocations2nd.empty())
9873 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
9876 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
9877 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
9879 VkDeviceSize sumUsedSize = 0;
9880 const size_t suballoc1stCount = suballocations1st.size();
9881 VkDeviceSize offset = VMA_DEBUG_MARGIN;
9883 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9885 const size_t suballoc2ndCount = suballocations2nd.size();
9886 size_t nullItem2ndCount = 0;
9887 for(
size_t i = 0; i < suballoc2ndCount; ++i)
9889 const VmaSuballocation& suballoc = suballocations2nd[i];
9890 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9892 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9893 VMA_VALIDATE(suballoc.offset >= offset);
9897 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9898 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9899 sumUsedSize += suballoc.size;
9906 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9909 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
9912 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
9914 const VmaSuballocation& suballoc = suballocations1st[i];
9915 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
9916 suballoc.hAllocation == VK_NULL_HANDLE);
9919 size_t nullItem1stCount = m_1stNullItemsBeginCount;
9921 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
9923 const VmaSuballocation& suballoc = suballocations1st[i];
9924 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9926 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9927 VMA_VALIDATE(suballoc.offset >= offset);
9928 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
9932 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9933 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9934 sumUsedSize += suballoc.size;
9941 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9943 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
9945 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9947 const size_t suballoc2ndCount = suballocations2nd.size();
9948 size_t nullItem2ndCount = 0;
9949 for(
size_t i = suballoc2ndCount; i--; )
9951 const VmaSuballocation& suballoc = suballocations2nd[i];
9952 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9954 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9955 VMA_VALIDATE(suballoc.offset >= offset);
9959 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9960 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9961 sumUsedSize += suballoc.size;
9968 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9971 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
9974 VMA_VALIDATE(offset <= GetSize());
9975 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
9980 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const
9982 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
9983 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
9986 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const
9988 const VkDeviceSize size = GetSize();
10000 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10002 switch(m_2ndVectorMode)
10004 case SECOND_VECTOR_EMPTY:
10010 const size_t suballocations1stCount = suballocations1st.size();
10011 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
10012 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10013 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
10015 firstSuballoc.offset,
10016 size - (lastSuballoc.offset + lastSuballoc.size));
10020 case SECOND_VECTOR_RING_BUFFER:
10025 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10026 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
10027 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
10028 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
10032 case SECOND_VECTOR_DOUBLE_STACK:
10037 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10038 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
10039 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
10040 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
10050 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
10052 const VkDeviceSize size = GetSize();
10053 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10054 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10055 const size_t suballoc1stCount = suballocations1st.size();
10056 const size_t suballoc2ndCount = suballocations2nd.size();
10067 VkDeviceSize lastOffset = 0;
10069 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10071 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10072 size_t nextAlloc2ndIndex = 0;
10073 while(lastOffset < freeSpace2ndTo1stEnd)
10076 while(nextAlloc2ndIndex < suballoc2ndCount &&
10077 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10079 ++nextAlloc2ndIndex;
10083 if(nextAlloc2ndIndex < suballoc2ndCount)
10085 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10088 if(lastOffset < suballoc.offset)
10091 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10105 lastOffset = suballoc.offset + suballoc.size;
10106 ++nextAlloc2ndIndex;
10112 if(lastOffset < freeSpace2ndTo1stEnd)
10114 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10122 lastOffset = freeSpace2ndTo1stEnd;
10127 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10128 const VkDeviceSize freeSpace1stTo2ndEnd =
10129 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10130 while(lastOffset < freeSpace1stTo2ndEnd)
10133 while(nextAlloc1stIndex < suballoc1stCount &&
10134 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10136 ++nextAlloc1stIndex;
10140 if(nextAlloc1stIndex < suballoc1stCount)
10142 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10145 if(lastOffset < suballoc.offset)
10148 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10162 lastOffset = suballoc.offset + suballoc.size;
10163 ++nextAlloc1stIndex;
10169 if(lastOffset < freeSpace1stTo2ndEnd)
10171 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10179 lastOffset = freeSpace1stTo2ndEnd;
10183 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10185 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10186 while(lastOffset < size)
10189 while(nextAlloc2ndIndex != SIZE_MAX &&
10190 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10192 --nextAlloc2ndIndex;
10196 if(nextAlloc2ndIndex != SIZE_MAX)
10198 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10201 if(lastOffset < suballoc.offset)
10204 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10218 lastOffset = suballoc.offset + suballoc.size;
10219 --nextAlloc2ndIndex;
10225 if(lastOffset < size)
10227 const VkDeviceSize unusedRangeSize = size - lastOffset;
10243 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const
10245 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10246 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10247 const VkDeviceSize size = GetSize();
10248 const size_t suballoc1stCount = suballocations1st.size();
10249 const size_t suballoc2ndCount = suballocations2nd.size();
10251 inoutStats.
size += size;
10253 VkDeviceSize lastOffset = 0;
10255 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10257 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10258 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
10259 while(lastOffset < freeSpace2ndTo1stEnd)
10262 while(nextAlloc2ndIndex < suballoc2ndCount &&
10263 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10265 ++nextAlloc2ndIndex;
10269 if(nextAlloc2ndIndex < suballoc2ndCount)
10271 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10274 if(lastOffset < suballoc.offset)
10277 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10288 lastOffset = suballoc.offset + suballoc.size;
10289 ++nextAlloc2ndIndex;
10294 if(lastOffset < freeSpace2ndTo1stEnd)
10297 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10304 lastOffset = freeSpace2ndTo1stEnd;
10309 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10310 const VkDeviceSize freeSpace1stTo2ndEnd =
10311 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10312 while(lastOffset < freeSpace1stTo2ndEnd)
10315 while(nextAlloc1stIndex < suballoc1stCount &&
10316 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10318 ++nextAlloc1stIndex;
10322 if(nextAlloc1stIndex < suballoc1stCount)
10324 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10327 if(lastOffset < suballoc.offset)
10330 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10341 lastOffset = suballoc.offset + suballoc.size;
10342 ++nextAlloc1stIndex;
10347 if(lastOffset < freeSpace1stTo2ndEnd)
10350 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10357 lastOffset = freeSpace1stTo2ndEnd;
10361 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10363 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10364 while(lastOffset < size)
10367 while(nextAlloc2ndIndex != SIZE_MAX &&
10368 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10370 --nextAlloc2ndIndex;
10374 if(nextAlloc2ndIndex != SIZE_MAX)
10376 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10379 if(lastOffset < suballoc.offset)
10382 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10393 lastOffset = suballoc.offset + suballoc.size;
10394 --nextAlloc2ndIndex;
10399 if(lastOffset < size)
10402 const VkDeviceSize unusedRangeSize = size - lastOffset;
10415 #if VMA_STATS_STRING_ENABLED
10416 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const
10418 const VkDeviceSize size = GetSize();
10419 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10420 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10421 const size_t suballoc1stCount = suballocations1st.size();
10422 const size_t suballoc2ndCount = suballocations2nd.size();
10426 size_t unusedRangeCount = 0;
10427 VkDeviceSize usedBytes = 0;
10429 VkDeviceSize lastOffset = 0;
10431 size_t alloc2ndCount = 0;
10432 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10434 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10435 size_t nextAlloc2ndIndex = 0;
10436 while(lastOffset < freeSpace2ndTo1stEnd)
10439 while(nextAlloc2ndIndex < suballoc2ndCount &&
10440 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10442 ++nextAlloc2ndIndex;
10446 if(nextAlloc2ndIndex < suballoc2ndCount)
10448 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10451 if(lastOffset < suballoc.offset)
10454 ++unusedRangeCount;
10460 usedBytes += suballoc.size;
10463 lastOffset = suballoc.offset + suballoc.size;
10464 ++nextAlloc2ndIndex;
10469 if(lastOffset < freeSpace2ndTo1stEnd)
10472 ++unusedRangeCount;
10476 lastOffset = freeSpace2ndTo1stEnd;
10481 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10482 size_t alloc1stCount = 0;
10483 const VkDeviceSize freeSpace1stTo2ndEnd =
10484 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10485 while(lastOffset < freeSpace1stTo2ndEnd)
10488 while(nextAlloc1stIndex < suballoc1stCount &&
10489 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10491 ++nextAlloc1stIndex;
10495 if(nextAlloc1stIndex < suballoc1stCount)
10497 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10500 if(lastOffset < suballoc.offset)
10503 ++unusedRangeCount;
10509 usedBytes += suballoc.size;
10512 lastOffset = suballoc.offset + suballoc.size;
10513 ++nextAlloc1stIndex;
10518 if(lastOffset < size)
10521 ++unusedRangeCount;
10525 lastOffset = freeSpace1stTo2ndEnd;
10529 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10531 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10532 while(lastOffset < size)
10535 while(nextAlloc2ndIndex != SIZE_MAX &&
10536 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10538 --nextAlloc2ndIndex;
10542 if(nextAlloc2ndIndex != SIZE_MAX)
10544 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10547 if(lastOffset < suballoc.offset)
10550 ++unusedRangeCount;
10556 usedBytes += suballoc.size;
10559 lastOffset = suballoc.offset + suballoc.size;
10560 --nextAlloc2ndIndex;
10565 if(lastOffset < size)
10568 ++unusedRangeCount;
10577 const VkDeviceSize unusedBytes = size - usedBytes;
10578 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
10583 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10585 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10586 size_t nextAlloc2ndIndex = 0;
10587 while(lastOffset < freeSpace2ndTo1stEnd)
10590 while(nextAlloc2ndIndex < suballoc2ndCount &&
10591 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10593 ++nextAlloc2ndIndex;
10597 if(nextAlloc2ndIndex < suballoc2ndCount)
10599 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10602 if(lastOffset < suballoc.offset)
10605 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10606 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10611 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10614 lastOffset = suballoc.offset + suballoc.size;
10615 ++nextAlloc2ndIndex;
10620 if(lastOffset < freeSpace2ndTo1stEnd)
10623 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10624 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10628 lastOffset = freeSpace2ndTo1stEnd;
10633 nextAlloc1stIndex = m_1stNullItemsBeginCount;
10634 while(lastOffset < freeSpace1stTo2ndEnd)
10637 while(nextAlloc1stIndex < suballoc1stCount &&
10638 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10640 ++nextAlloc1stIndex;
10644 if(nextAlloc1stIndex < suballoc1stCount)
10646 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10649 if(lastOffset < suballoc.offset)
10652 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10653 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10658 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10661 lastOffset = suballoc.offset + suballoc.size;
10662 ++nextAlloc1stIndex;
10667 if(lastOffset < freeSpace1stTo2ndEnd)
10670 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10671 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10675 lastOffset = freeSpace1stTo2ndEnd;
10679 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10681 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10682 while(lastOffset < size)
10685 while(nextAlloc2ndIndex != SIZE_MAX &&
10686 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10688 --nextAlloc2ndIndex;
10692 if(nextAlloc2ndIndex != SIZE_MAX)
10694 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10697 if(lastOffset < suballoc.offset)
10700 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10701 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10706 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10709 lastOffset = suballoc.offset + suballoc.size;
10710 --nextAlloc2ndIndex;
10715 if(lastOffset < size)
10718 const VkDeviceSize unusedRangeSize = size - lastOffset;
10719 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10728 PrintDetailedMap_End(json);
10730 #endif // #if VMA_STATS_STRING_ENABLED
10732 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
10733 uint32_t currentFrameIndex,
10734 uint32_t frameInUseCount,
10735 VkDeviceSize bufferImageGranularity,
10736 VkDeviceSize allocSize,
10737 VkDeviceSize allocAlignment,
10739 VmaSuballocationType allocType,
10740 bool canMakeOtherLost,
10742 VmaAllocationRequest* pAllocationRequest)
10744 VMA_ASSERT(allocSize > 0);
10745 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
10746 VMA_ASSERT(pAllocationRequest != VMA_NULL);
10747 VMA_HEAVY_ASSERT(Validate());
10748 return upperAddress ?
10749 CreateAllocationRequest_UpperAddress(
10750 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10751 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
10752 CreateAllocationRequest_LowerAddress(
10753 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10754 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
10757 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
10758 uint32_t currentFrameIndex,
10759 uint32_t frameInUseCount,
10760 VkDeviceSize bufferImageGranularity,
10761 VkDeviceSize allocSize,
10762 VkDeviceSize allocAlignment,
10763 VmaSuballocationType allocType,
10764 bool canMakeOtherLost,
10766 VmaAllocationRequest* pAllocationRequest)
10768 const VkDeviceSize size = GetSize();
10769 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10770 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10772 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10774 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
10779 if(allocSize > size)
10783 VkDeviceSize resultBaseOffset = size - allocSize;
10784 if(!suballocations2nd.empty())
10786 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10787 resultBaseOffset = lastSuballoc.offset - allocSize;
10788 if(allocSize > lastSuballoc.offset)
10795 VkDeviceSize resultOffset = resultBaseOffset;
10798 if(VMA_DEBUG_MARGIN > 0)
10800 if(resultOffset < VMA_DEBUG_MARGIN)
10804 resultOffset -= VMA_DEBUG_MARGIN;
10808 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
10812 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10814 bool bufferImageGranularityConflict =
false;
10815 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
10817 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
10818 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10820 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
10822 bufferImageGranularityConflict =
true;
10830 if(bufferImageGranularityConflict)
10832 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
10837 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
10838 suballocations1st.back().offset + suballocations1st.back().size :
10840 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
10844 if(bufferImageGranularity > 1)
10846 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
10848 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
10849 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10851 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
10865 pAllocationRequest->offset = resultOffset;
10866 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
10867 pAllocationRequest->sumItemSize = 0;
10869 pAllocationRequest->itemsToMakeLostCount = 0;
10870 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
10877 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
10878 uint32_t currentFrameIndex,
10879 uint32_t frameInUseCount,
10880 VkDeviceSize bufferImageGranularity,
10881 VkDeviceSize allocSize,
10882 VkDeviceSize allocAlignment,
10883 VmaSuballocationType allocType,
10884 bool canMakeOtherLost,
10886 VmaAllocationRequest* pAllocationRequest)
10888 const VkDeviceSize size = GetSize();
10889 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10890 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10892 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10896 VkDeviceSize resultBaseOffset = 0;
10897 if(!suballocations1st.empty())
10899 const VmaSuballocation& lastSuballoc = suballocations1st.back();
10900 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10904 VkDeviceSize resultOffset = resultBaseOffset;
10907 if(VMA_DEBUG_MARGIN > 0)
10909 resultOffset += VMA_DEBUG_MARGIN;
10913 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
10917 if(bufferImageGranularity > 1 && !suballocations1st.empty())
10919 bool bufferImageGranularityConflict =
false;
10920 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
10922 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
10923 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10925 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10927 bufferImageGranularityConflict =
true;
10935 if(bufferImageGranularityConflict)
10937 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10941 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
10942 suballocations2nd.back().offset : size;
10945 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
10949 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10951 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
10953 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
10954 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10956 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10970 pAllocationRequest->offset = resultOffset;
10971 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
10972 pAllocationRequest->sumItemSize = 0;
10974 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
10975 pAllocationRequest->itemsToMakeLostCount = 0;
10982 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10984 VMA_ASSERT(!suballocations1st.empty());
10986 VkDeviceSize resultBaseOffset = 0;
10987 if(!suballocations2nd.empty())
10989 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10990 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10994 VkDeviceSize resultOffset = resultBaseOffset;
10997 if(VMA_DEBUG_MARGIN > 0)
10999 resultOffset += VMA_DEBUG_MARGIN;
11003 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
11007 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
11009 bool bufferImageGranularityConflict =
false;
11010 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
11012 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
11013 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
11015 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
11017 bufferImageGranularityConflict =
true;
11025 if(bufferImageGranularityConflict)
11027 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
11031 pAllocationRequest->itemsToMakeLostCount = 0;
11032 pAllocationRequest->sumItemSize = 0;
11033 size_t index1st = m_1stNullItemsBeginCount;
11035 if(canMakeOtherLost)
11037 while(index1st < suballocations1st.size() &&
11038 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
11041 const VmaSuballocation& suballoc = suballocations1st[index1st];
11042 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
11048 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11049 if(suballoc.hAllocation->CanBecomeLost() &&
11050 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11052 ++pAllocationRequest->itemsToMakeLostCount;
11053 pAllocationRequest->sumItemSize += suballoc.size;
11065 if(bufferImageGranularity > 1)
11067 while(index1st < suballocations1st.size())
11069 const VmaSuballocation& suballoc = suballocations1st[index1st];
11070 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
11072 if(suballoc.hAllocation != VK_NULL_HANDLE)
11075 if(suballoc.hAllocation->CanBecomeLost() &&
11076 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
11078 ++pAllocationRequest->itemsToMakeLostCount;
11079 pAllocationRequest->sumItemSize += suballoc.size;
11097 if(index1st == suballocations1st.size() &&
11098 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
11101 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
11106 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
11107 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
11111 if(bufferImageGranularity > 1)
11113 for(
size_t nextSuballocIndex = index1st;
11114 nextSuballocIndex < suballocations1st.size();
11115 nextSuballocIndex++)
11117 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
11118 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
11120 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
11134 pAllocationRequest->offset = resultOffset;
11135 pAllocationRequest->sumFreeSize =
11136 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
11138 - pAllocationRequest->sumItemSize;
11139 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
11148 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
11149 uint32_t currentFrameIndex,
11150 uint32_t frameInUseCount,
11151 VmaAllocationRequest* pAllocationRequest)
11153 if(pAllocationRequest->itemsToMakeLostCount == 0)
11158 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
11161 SuballocationVectorType* suballocations = &AccessSuballocations1st();
11162 size_t index = m_1stNullItemsBeginCount;
11163 size_t madeLostCount = 0;
11164 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
11166 if(index == suballocations->size())
11170 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11172 suballocations = &AccessSuballocations2nd();
11176 VMA_ASSERT(!suballocations->empty());
11178 VmaSuballocation& suballoc = (*suballocations)[index];
11179 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11181 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
11182 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
11183 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11185 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11186 suballoc.hAllocation = VK_NULL_HANDLE;
11187 m_SumFreeSize += suballoc.size;
11188 if(suballocations == &AccessSuballocations1st())
11190 ++m_1stNullItemsMiddleCount;
11194 ++m_2ndNullItemsCount;
11206 CleanupAfterFree();
11212 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
11214 uint32_t lostAllocationCount = 0;
11216 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11217 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11219 VmaSuballocation& suballoc = suballocations1st[i];
11220 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11221 suballoc.hAllocation->CanBecomeLost() &&
11222 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11224 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11225 suballoc.hAllocation = VK_NULL_HANDLE;
11226 ++m_1stNullItemsMiddleCount;
11227 m_SumFreeSize += suballoc.size;
11228 ++lostAllocationCount;
11232 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11233 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11235 VmaSuballocation& suballoc = suballocations2nd[i];
11236 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
11237 suballoc.hAllocation->CanBecomeLost() &&
11238 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
11240 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11241 suballoc.hAllocation = VK_NULL_HANDLE;
11242 ++m_2ndNullItemsCount;
11243 m_SumFreeSize += suballoc.size;
11244 ++lostAllocationCount;
11248 if(lostAllocationCount)
11250 CleanupAfterFree();
11253 return lostAllocationCount;
11256 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
11258 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11259 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
11261 const VmaSuballocation& suballoc = suballocations1st[i];
11262 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11264 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11266 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11267 return VK_ERROR_VALIDATION_FAILED_EXT;
11269 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11271 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11272 return VK_ERROR_VALIDATION_FAILED_EXT;
11277 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11278 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
11280 const VmaSuballocation& suballoc = suballocations2nd[i];
11281 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
11283 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
11285 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
11286 return VK_ERROR_VALIDATION_FAILED_EXT;
11288 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
11290 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
11291 return VK_ERROR_VALIDATION_FAILED_EXT;
11299 void VmaBlockMetadata_Linear::Alloc(
11300 const VmaAllocationRequest& request,
11301 VmaSuballocationType type,
11302 VkDeviceSize allocSize,
11305 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
11307 switch(request.type)
11309 case VmaAllocationRequestType::UpperAddress:
11311 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
11312 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
11313 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11314 suballocations2nd.push_back(newSuballoc);
11315 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
11318 case VmaAllocationRequestType::EndOf1st:
11320 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11322 VMA_ASSERT(suballocations1st.empty() ||
11323 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
11325 VMA_ASSERT(request.offset + allocSize <= GetSize());
11327 suballocations1st.push_back(newSuballoc);
11330 case VmaAllocationRequestType::EndOf2nd:
11332 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11334 VMA_ASSERT(!suballocations1st.empty() &&
11335 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
11336 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11338 switch(m_2ndVectorMode)
11340 case SECOND_VECTOR_EMPTY:
11342 VMA_ASSERT(suballocations2nd.empty());
11343 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
11345 case SECOND_VECTOR_RING_BUFFER:
11347 VMA_ASSERT(!suballocations2nd.empty());
11349 case SECOND_VECTOR_DOUBLE_STACK:
11350 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
11356 suballocations2nd.push_back(newSuballoc);
11360 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
11363 m_SumFreeSize -= newSuballoc.size;
11366 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
11368 FreeAtOffset(allocation->GetOffset());
11371 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
11373 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11374 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11376 if(!suballocations1st.empty())
11379 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
11380 if(firstSuballoc.offset == offset)
11382 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
11383 firstSuballoc.hAllocation = VK_NULL_HANDLE;
11384 m_SumFreeSize += firstSuballoc.size;
11385 ++m_1stNullItemsBeginCount;
11386 CleanupAfterFree();
11392 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
11393 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
11395 VmaSuballocation& lastSuballoc = suballocations2nd.back();
11396 if(lastSuballoc.offset == offset)
11398 m_SumFreeSize += lastSuballoc.size;
11399 suballocations2nd.pop_back();
11400 CleanupAfterFree();
11405 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
11407 VmaSuballocation& lastSuballoc = suballocations1st.back();
11408 if(lastSuballoc.offset == offset)
11410 m_SumFreeSize += lastSuballoc.size;
11411 suballocations1st.pop_back();
11412 CleanupAfterFree();
11419 VmaSuballocation refSuballoc;
11420 refSuballoc.offset = offset;
11422 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
11423 suballocations1st.begin() + m_1stNullItemsBeginCount,
11424 suballocations1st.end(),
11426 VmaSuballocationOffsetLess());
11427 if(it != suballocations1st.end())
11429 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11430 it->hAllocation = VK_NULL_HANDLE;
11431 ++m_1stNullItemsMiddleCount;
11432 m_SumFreeSize += it->size;
11433 CleanupAfterFree();
11438 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
11441 VmaSuballocation refSuballoc;
11442 refSuballoc.offset = offset;
11444 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
11445 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
11446 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
11447 if(it != suballocations2nd.end())
11449 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11450 it->hAllocation = VK_NULL_HANDLE;
11451 ++m_2ndNullItemsCount;
11452 m_SumFreeSize += it->size;
11453 CleanupAfterFree();
11458 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
11461 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const
11463 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11464 const size_t suballocCount = AccessSuballocations1st().size();
11465 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
11468 void VmaBlockMetadata_Linear::CleanupAfterFree()
11470 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11471 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11475 suballocations1st.clear();
11476 suballocations2nd.clear();
11477 m_1stNullItemsBeginCount = 0;
11478 m_1stNullItemsMiddleCount = 0;
11479 m_2ndNullItemsCount = 0;
11480 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11484 const size_t suballoc1stCount = suballocations1st.size();
11485 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11486 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
11489 while(m_1stNullItemsBeginCount < suballoc1stCount &&
11490 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
11492 ++m_1stNullItemsBeginCount;
11493 --m_1stNullItemsMiddleCount;
11497 while(m_1stNullItemsMiddleCount > 0 &&
11498 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
11500 --m_1stNullItemsMiddleCount;
11501 suballocations1st.pop_back();
11505 while(m_2ndNullItemsCount > 0 &&
11506 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
11508 --m_2ndNullItemsCount;
11509 suballocations2nd.pop_back();
11513 while(m_2ndNullItemsCount > 0 &&
11514 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
11516 --m_2ndNullItemsCount;
11517 VmaVectorRemove(suballocations2nd, 0);
11520 if(ShouldCompact1st())
11522 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
11523 size_t srcIndex = m_1stNullItemsBeginCount;
11524 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
11526 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
11530 if(dstIndex != srcIndex)
11532 suballocations1st[dstIndex] = suballocations1st[srcIndex];
11536 suballocations1st.resize(nonNullItemCount);
11537 m_1stNullItemsBeginCount = 0;
11538 m_1stNullItemsMiddleCount = 0;
11542 if(suballocations2nd.empty())
11544 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11548 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
11550 suballocations1st.clear();
11551 m_1stNullItemsBeginCount = 0;
11553 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11556 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11557 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
11558 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
11559 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
11561 ++m_1stNullItemsBeginCount;
11562 --m_1stNullItemsMiddleCount;
11564 m_2ndNullItemsCount = 0;
11565 m_1stVectorIndex ^= 1;
11570 VMA_HEAVY_ASSERT(Validate());
11577 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
11578 VmaBlockMetadata(hAllocator),
11580 m_AllocationCount(0),
11584 memset(m_FreeList, 0,
sizeof(m_FreeList));
11587 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
11589 DeleteNode(m_Root);
11592 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
11594 VmaBlockMetadata::Init(size);
11596 m_UsableSize = VmaPrevPow2(size);
11597 m_SumFreeSize = m_UsableSize;
11601 while(m_LevelCount < MAX_LEVELS &&
11602 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
11607 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
11608 rootNode->offset = 0;
11609 rootNode->type = Node::TYPE_FREE;
11610 rootNode->parent = VMA_NULL;
11611 rootNode->buddy = VMA_NULL;
11614 AddToFreeListFront(0, rootNode);
11617 bool VmaBlockMetadata_Buddy::Validate()
const
11620 ValidationContext ctx;
11621 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
11623 VMA_VALIDATE(
false &&
"ValidateNode failed.");
11625 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
11626 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
11629 for(uint32_t level = 0; level < m_LevelCount; ++level)
11631 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
11632 m_FreeList[level].front->free.prev == VMA_NULL);
11634 for(Node* node = m_FreeList[level].front;
11636 node = node->free.next)
11638 VMA_VALIDATE(node->type == Node::TYPE_FREE);
11640 if(node->free.next == VMA_NULL)
11642 VMA_VALIDATE(m_FreeList[level].back == node);
11646 VMA_VALIDATE(node->free.next->free.prev == node);
11652 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
11654 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
11660 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const
11662 for(uint32_t level = 0; level < m_LevelCount; ++level)
11664 if(m_FreeList[level].front != VMA_NULL)
11666 return LevelToNodeSize(level);
11672 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
11674 const VkDeviceSize unusableSize = GetUnusableSize();
11685 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
11687 if(unusableSize > 0)
11696 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const
11698 const VkDeviceSize unusableSize = GetUnusableSize();
11700 inoutStats.
size += GetSize();
11701 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
11706 if(unusableSize > 0)
11713 #if VMA_STATS_STRING_ENABLED
11715 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const
11719 CalcAllocationStatInfo(stat);
11721 PrintDetailedMap_Begin(
11727 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
11729 const VkDeviceSize unusableSize = GetUnusableSize();
11730 if(unusableSize > 0)
11732 PrintDetailedMap_UnusedRange(json,
11737 PrintDetailedMap_End(json);
11740 #endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity: whenever the allocation
    // might be an image, pad both alignment and size up to the granularity.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
    {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}
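/*
Editorial summary of the search above: the loop starts at targetLevel (the
smallest node size that fits the request) and walks toward level 0, i.e. from
best-fitting nodes toward larger ones, returning the first free node whose
offset satisfies the requested alignment. The level at which the node was
found is carried to Alloc() through pAllocationRequest->customData, so Alloc()
knows where to start splitting.
*/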
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    /*
    Lost allocations are not supported in the buddy allocator at the moment.
    Support might be added in the future.
    */
    return pAllocationRequest->itemsToMakeLostCount == 0;
}

uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    /*
    Lost allocations are not supported in the buddy allocator at the moment.
    Support might be added in the future.
    */
    return 0;
}
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down, splitting free nodes.
    while(currLevel < targetLevel)
    {
        // currNode is already the first free node at currLevel.
        // Remove it from the list of free nodes at this level.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
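/*
Illustrative walk-through of Alloc() above (hypothetical numbers): suppose
m_UsableSize = 256 MiB, the request was found at level 1 (a free 128 MiB
node) and targetLevel = 3 (32 MiB). The splitting loop runs twice:
128 MiB -> two 64 MiB buddies, then the left 64 MiB -> two 32 MiB buddies.
Each split pushes the right child and then the left child to the front of the
child level's free list, so the left child - which shares the parent's
offset and therefore its alignment - is always picked next.
*/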
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }

    vma_delete(GetAllocationCallbacks(), node);
}
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev, next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
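/*
Example for AllocSizeToLevel() (hypothetical numbers): with
m_UsableSize = 256 MiB, a 40 MiB request maps to level 2 (64 MiB nodes),
because 40 MiB would not fit in the 32 MiB nodes of level 3. The result is
always the deepest level whose node size still accommodates allocSize.
*/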
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find node and level.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT)
    {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize)
        {
            node = node->split.leftChild;
        }
        else
        {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free nodes (buddies) going up, but don't go above the root.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
    {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}
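/*
Editorial note on the merge loop above: after marking the freed node
TYPE_FREE, the code repeatedly checks whether its buddy is also free. If so,
both children are destroyed and the parent reverts to a single free node one
level up - the classic buddy-coalescing step. The loop stops at the first
level where the buddy is still in use, or at the root.
*/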
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += levelNodeSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            ++outInfo.allocationCount;
            outInfo.usedBytes += allocSize;
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);

            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    // List is empty.
    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else
    {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}
void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}
void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // Hitting this assert means some allocations were not freed before the
    // block is destroyed - a memory leak on the caller's side.
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}
bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}
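/*
Editorial note: Map()/Unmap() implement reference-counted mapping of the
whole VkDeviceMemory block. Only the first Map() calls vkMapMemory; nested
calls just bump m_MapCount and return the cached pointer, so many allocations
inside one block can be "mapped" concurrently. A sketch of the same idea
through the public API (vmaMapMemory/vmaUnmapMemory are real entry points;
allocator, allocation, myData and myDataSize are placeholders here):

    void* mapped = VMA_NULL;
    VkResult res = vmaMapMemory(allocator, allocation, &mapped);
    if(res == VK_SUCCESS)
    {
        memcpy(mapped, myData, myDataSize); // write through the mapped pointer
        vmaUnmapMemory(allocator, allocation);
    }
*/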
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
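/*
Editorial note on corruption detection: when VMA_DEBUG_MARGIN > 0 and
VMA_DEBUG_DETECT_CORRUPTION is enabled, every allocation is surrounded by a
margin filled with a known magic value:

    [ margin | allocation payload | margin ]
      ^ checked at allocOffset - VMA_DEBUG_MARGIN and at allocOffset + allocSize

Validation re-maps the block and checks both sides; a mismatch means some
write ran past the bounds of a neighboring allocation.
*/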
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}
VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
static void VmaInitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}
VmaPool_T::~VmaPool_T()
{
}

void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}
#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}
VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}
bool VmaBlockVector::IsEmpty()
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    return m_Blocks.empty();
}

bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;

VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // If linear algorithm is used, canMakeOtherLost is available only when used as ring buffer,
    // which in turn is available only when maxBlockCount == 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    /*
    Under certain condition, this whole section can be skipped for optimization, so
    we move on directly to trying to allocate with canMakeOtherLost. That is the case
    e.g. for custom pools with linear algorithm.
    */
    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
        }
        // 2. Try to create new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }
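            /*
            Editorial note on the sizing heuristic above (illustrative,
            hypothetical numbers): with a preferred block size of e.g.
            256 MiB and no existing blocks, the first block is created at 1/8
            of that (32 MiB), later ones at 1/4 and 1/2, growing toward the
            full preferred size. If vkAllocateMemory fails, the retry loop
            halves the size up to NEW_BLOCK_SIZE_SHIFT_MAX times, as long as
            the request still fits in the smaller block.
            */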
            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }
    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // Allocate from this block.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    UpdateHasEmptyBlock();
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment,
                        size,
                        m_MemoryTypeIndex,
                        suballocType,
                        mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations must have been touched while we were here. Next try.
            }
            else
            {
                // Could not find place in any of the blocks - break outer loop.
                break;
            }
        }
        /* Maximum number of tries exceeded - a very unlikely situation. May happen when
        many other threads are simultaneously touching allocations, making it
        impossible to make them lost at the same time as we try to allocate. */
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
void VmaBlockVector::Free(
    const VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already have an empty block. We don't want two, so delete this one.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // else: We now have one empty block - leave it.
        }
        // pBlock didn't become empty, but we have another empty block - find and free that one.
        // (This is optional, heuristics.)
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
            }
        }

        UpdateHasEmptyBlock();
        IncrementallySortBlocks();
    }

    // Destruction of a free block. Deferred until this point, outside of the
    // mutex lock, for performance reasons.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}
void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}
void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
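/*
Editorial note: this is deliberately a single bubble-sort pass that stops at
the first swap. Fully sorting on every free would be wasted work; one swap
per call keeps m_Blocks ordered by increasing free space amortized over many
calls, which is the ordering the best-fit search in AllocatePage() assumes.
*/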
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex,
        m_FrameInUseCount,
        m_BufferImageGranularity,
        size,
        alignment,
        isUpperAddress,
        suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // Allocate from pBlock.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        UpdateHasEmptyBlock();
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment,
            size,
            m_MemoryTypeIndex,
            suballocType,
            mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;

#if VMA_BUFFER_DEVICE_ADDRESS
    // Every standalone block can potentially contain a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT - always enable the feature.
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_hAllocator->m_UseKhrBufferDeviceAddress)
    {
        allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
        VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created. Create a new block object for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that participate with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get the mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do the actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE THE ACTUAL DATA COPY HAPPENS.
            memmove(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }
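    /*
    Editorial note: the invalidate-before-read / flush-after-write pairs above
    are only needed for non-coherent memory types (isNonCoherent). Each range
    is expanded to nonCoherentAtomSize boundaries and clamped to the block
    size, as the Vulkan spec requires for vkInvalidateMappedMemoryRanges and
    vkFlushMappedMemoryRanges.
    */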
    // Go over all blocks in reverse order. Unmap those that were mapped just
    // for defragmentation. Do this regardless of pDefragCtx->res.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that participate with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind a buffer over the whole block where necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to the command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Buffers are kept in the defragmentation context for later destruction.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}
void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                break;
            }
        }
    }
    UpdateHasEmptyBlock();
}
void VmaBlockVector::UpdateHasEmptyBlock()
{
    m_HasEmptyBlock = false;
    for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = true;
            break;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
            {
                if(!m_Mutex.TryLockWrite())
                {
                    pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
                    return;
                }
            }
            else
            {
                m_Mutex.LockWrite();
                pCtx->mutexLocked = true;
            }
        }

        pCtx->Begin(overlappingMoveSupported, flags);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
        {
            if(m_hAllocator->m_UseMutex)
                m_Mutex.UnlockWrite();

            if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
                pCtx->res = VK_NOT_READY;

            return;
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
            }
        }
    }
}
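/*
Editorial note on the CPU/GPU choice above: host-visible memory can be
defragmented on the CPU with memmove() through mapped pointers; the GPU path
instead records vkCmdCopyBuffer commands and is preferred for DEVICE_LOCAL
memory or on integrated GPUs. Corruption detection disables the GPU path
(see canDefragmentOnGpu) because the magic-value margins must be rewritten
through a mapped pointer.
*/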
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    uint32_t flags,
    VmaDefragmentationStats* pStats)
{
    if((flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL) && m_hAllocator->m_UseMutex)
    {
        VMA_ASSERT(pCtx->mutexLocked == false);

        // Incremental defragmentation doesn't hold the lock, so when we enter here
        // no lock is protecting us. Since we mutate state below, take the lock now.
        m_Mutex.LockWrite();
        pCtx->mutexLocked = true;
    }

    // If the mutex isn't locked we didn't do any work and there is nothing to delete.
    if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
    {
        // Destroy buffers.
        for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
        {
            VmaBlockDefragmentationContext &blockCtx = pCtx->blockContexts[blockIndex];
            if(blockCtx.hBuffer)
            {
                (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            FreeEmptyBlocks(pStats);
        }
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
uint32_t VmaBlockVector::ProcessDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    const uint32_t moveCount = std::min(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);

    for(uint32_t i = 0; i < moveCount; ++i)
    {
        VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];

        pMove->allocation = move.hAllocation;
        pMove->memory = move.pDstBlock->GetDeviceMemory();
        pMove->offset = move.dstOffset;

        ++pMove;
    }

    pCtx->defragmentationMovesProcessed += moveCount;

    return moveCount;
}
void VmaBlockVector::CommitDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationStats* pStats)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++i)
    {
        const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];

        move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
        move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
    }

    pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
    FreeEmptyBlocks(pStats);
}
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}
bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}
VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now as we are inside VmaBlockVector::m_Mutex, we can make a final check whether this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    bool freeOldAllocations)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    // This is a choice based on research.
    uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;

    size_t srcBlockMinIndex = 0;
    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find a new place for this allocation in a preceding or the current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                strategy,
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move = {};
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                move.hAllocation = allocInfo.m_hAllocation;
                move.pSrcBlock = pSrcBlockInfo->m_pBlock;
                move.pDstBlock = pDstBlockInfo->m_pBlock;

                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);

                if(freeOldAllocations)
                {
                    pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
                    allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
                }

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pBlockInfo->m_Allocations for the next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        // This is a choice based on research.
        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks this time by the main criterion, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // Execute defragmentation rounds (the main part).
    const uint32_t roundCount = 2;

    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove, !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL));
    }

    return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}
VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most destination.

    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
    });

    // THE MAIN ALGORITHM

    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;
    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            VmaDefragmentationMove move = {};
            // Try to place it in one of the free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.
                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to the next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register the remaining free space at the end of the dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If destination and source place overlap, skip if it would move it
                        // by only < 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }
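                    /*
                    Editorial note: the "* 64" test above skips an in-place
                    move that would shift the allocation left by less than
                    1/64 of its own size - the cost of the copy would
                    outweigh the negligible compaction gain.
                    */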
                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;

                        move.srcBlockIndex = srcOrigBlockIndex;
                        move.dstBlockIndex = dstOrigBlockIndex;
                        move.srcOffset = srcAllocOffset;
                        move.dstOffset = dstAllocOffset;
                        move.size = srcAllocSize;

                        moves.push_back(move);
                    }
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.
                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = dstOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
        }
    }
14325 m_BlockInfos.clear();
14327 PostprocessMetadata();
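// PreprocessMetadata() below strips all FREE suballocations from every block's
// metadata, leaving only the allocated suballocations. Defragment() can then
// re-pack those toward the front of each block, and PostprocessMetadata()
// rebuilds the free list, free counts, and size statistics afterwards.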
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
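// Inverse of PreprocessMetadata(): walks the now-compacted suballocations of
// each block, re-inserts FREE suballocations into the gaps and at the tail,
// updates m_FreeCount / m_SumFreeSize, and re-sorts m_FreeSuballocationsBySize.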
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
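// Inserts suballoc into pMetadata->m_Suballocations, keeping the list sorted
// by offset using a simple linear scan from the beginning.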
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
    // Find the first suballocation with offset >= suballoc.offset and insert before it.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end() && it->offset < suballoc.offset)
    {
        ++it;
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
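// Context of defragmentation for a single VmaBlockVector. It owns the chosen
// defragmentation algorithm (created lazily in Begin()) and the list of
// allocations explicitly registered for defragmentation.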
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
    defragmentationMovesProcessed(0),
    defragmentationMovesCommitted(0),
    hasDefragmentationPlan(0),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}

VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}

void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    // Fast algorithm is supported only when certain criteria are met:
    // - VMA_DEBUG_MARGIN is 0.
    // - All allocations in this block vector are moveable.
    // - There is no possibility of image/buffer granularity conflict.
    // - The defragmentation is not incremental.
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
        !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL))
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
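// VmaDefragmentationContext_T aggregates per-block-vector contexts: one slot
// per default memory type in m_DefaultPoolContexts plus a dynamic array for
// custom pools. The destructor ends defragmentation on all of them.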
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}

VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, const VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with algorithm other than default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }
            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }
            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    const VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentation contexts. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // Dedicated allocations and lost allocations cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with algorithm other than default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to a default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
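// Defragment() either performs the whole (non-incremental) defragmentation
// within the given CPU/GPU move limits, or, when
// VMA_DEFRAGMENTATION_FLAG_INCREMENTAL is used, only records the limits and
// returns VK_NOT_READY so that the work happens in the pass functions below.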
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
    {
        // For incremental defragmentation, just earmark how much can be moved.
        // The real work happens in the defragmentation passes.
        m_MaxCpuBytesToMove = maxCpuBytesToMove;
        m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
        m_MaxGpuBytesToMove = maxGpuBytesToMove;
        m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;

        if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
            m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
            return VK_SUCCESS;

        return VK_NOT_READY;
    }

    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx, pStats, flags,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx, pStats, flags,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}
VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo)
{
    VmaDefragmentationPassMoveInfo* pCurrentMove = pInfo->pMoves;
    uint32_t movesLeft = pInfo->moveCount;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                pBlockVectorCtx->GetBlockVector()->Defragment(
                    pBlockVectorCtx, m_pStats, m_Flags,
                    m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                    m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                    VK_NULL_HANDLE);

                if(pBlockVectorCtx->res < VK_SUCCESS)
                    continue;

                pBlockVectorCtx->hasDefragmentationPlan = true;
            }

            const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
                pBlockVectorCtx,
                pCurrentMove, movesLeft);

            movesLeft -= processed;
            pCurrentMove += processed;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx, m_pStats, m_Flags,
                m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                VK_NULL_HANDLE);

            if(pBlockVectorCtx->res < VK_SUCCESS)
                continue;

            pBlockVectorCtx->hasDefragmentationPlan = true;
        }

        const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
            pBlockVectorCtx,
            pCurrentMove, movesLeft);

        movesLeft -= processed;
        pCurrentMove += processed;
    }

    pInfo->moveCount = pInfo->moveCount - movesLeft;

    return VK_SUCCESS;
}
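// DefragmentPassEnd() commits the moves produced by the current pass. It
// returns VK_NOT_READY if any context still has uncommitted moves or no
// defragmentation plan yet, meaning another pass is required.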
VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
{
    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                res = VK_NOT_READY;
                continue;
            }

            pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
                pBlockVectorCtx, m_pStats);

            if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
                res = VK_NOT_READY;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            res = VK_NOT_READY;
            continue;
        }

        pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
            pBlockVectorCtx, m_pStats);

        if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
            res = VK_NOT_READY;
    }

    return res;
}
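// VmaRecorder writes a CSV-like trace of VMA calls to a file. Each record line
// is: threadId,time,frameIndex,functionName followed by the call's parameters.
// The file starts with a header identifying recording format version 1,8.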
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_RecordingStartTime(std::chrono::high_resolution_clock::now())
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

#if defined(_WIN32)
    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
#else
    // Open file for writing.
    m_File = fopen(settings.pFilePath, "wb");
    if(m_File == 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
#endif

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,8");

    return VK_SUCCESS;
}

VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}

void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}

void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}

void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    // Pool handles are printed the same way as allocation handles.
    if(info.poolCount)
    {
        fprintf(m_File, "%p", info.pPools[0]);
        for(uint32_t i = 1; i < info.poolCount; ++i)
        {
            fprintf(m_File, " %p", info.pPools[i]);
        }
    }
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
    Flush();
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}

void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
    Flush();
}

VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            // If the copy-string bit is not set, print the pointer value itself.
            snprintf(m_PtrStr, 17, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled,
    bool deviceCoherentMemoryExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
#if defined(_WIN32)
    outParams.threadId = GetCurrentThreadId();
#else
    // Use C++11 features to get the thread id and convert it to uint32_t.
    // There is room for optimization since stringstream is quite slow.
    std::thread::id thread_id = std::this_thread::get_id();
    std::stringstream thread_id_to_string_converter;
    thread_id_to_string_converter << thread_id;
    std::string thread_id_as_string = thread_id_to_string_converter.str();
    outParams.threadId = static_cast<uint32_t>(std::stoi(thread_id_as_string.c_str()));
#endif

    auto current_time = std::chrono::high_resolution_clock::now();

    outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
}

void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}

void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
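// VmaAllocationObjectAllocator allocates VmaAllocation_T objects from a pool
// (1024 items per block), guarded by a mutex so it can be used concurrently.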
VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
    m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
    m_UseAmdDeviceCoherentMemory((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT) != 0),
    m_UseKhrBufferDeviceAddress((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0),
    m_GlobalMemoryTypeBits(UINT32_MAX)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        // These extensions are promoted to Vulkan 1.1 core, so no need to enable them explicitly.
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Needs to be a multiple of uint32_t size because the magic value is written as uint32_t.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device && pCreateInfo->instance);

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if !(VMA_BUFFER_DEVICE_ADDRESS)
    if(m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
    }
#endif
#if VMA_VULKAN_VERSION < 1002000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0,
            SIZE_MAX,
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call CreateMinBlocks here, because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}
VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget,
            m_UseAmdDeviceCoherentMemory);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
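// Vulkan function pointers are imported in up to three steps, later steps
// filling only the pointers still null: statically linked functions first
// (if VMA_STATIC_VULKAN_FUNCTIONS), then pointers given by the user in
// VmaVulkanFunctions, then vkGet*ProcAddr lookups
// (if VMA_DYNAMIC_VULKAN_FUNCTIONS). ValidateVulkanFunctions() asserts that
// every pointer required by the current configuration ended up non-null.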
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Static();
#endif

    if(pVulkanFunctions != VMA_NULL)
    {
        ImportVulkanFunctions_Custom(pVulkanFunctions);
    }

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Dynamic();
#endif

    ValidateVulkanFunctions();
}
#if VMA_STATIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Static()
{
    // Vulkan 1.0
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;

    // Vulkan 1.1
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
        m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
        m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
    }
#endif
}

#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions)
{
    VMA_ASSERT(pVulkanFunctions != VMA_NULL);

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
    VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
    VMA_COPY_IF_NOT_NULL(vkFreeMemory);
    VMA_COPY_IF_NOT_NULL(vkMapMemory);
    VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
    VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
    VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
    VMA_COPY_IF_NOT_NULL(vkCreateImage);
    VMA_COPY_IF_NOT_NULL(vkDestroyImage);
    VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif

#if VMA_MEMORY_BUDGET
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif

#undef VMA_COPY_IF_NOT_NULL
}
#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
{
#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);

    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
    VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
    VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
    VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
    VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
    VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
    VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
    VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
    VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
    VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
    VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
    VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");

#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2");
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2");
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif

#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
    }
#endif

#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
    }
#endif // #if VMA_BIND_MEMORY2

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif // #if VMA_MEMORY_BUDGET

#undef VMA_FETCH_DEVICE_FUNC
#undef VMA_FETCH_INSTANCE_FUNC
}

#endif // #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
void VmaAllocator_T::ValidateVulkanFunctions()
{
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif

#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
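// AllocateMemoryOfType() allocates from a single, already chosen memory type:
// it first decides between dedicated memory and the type's block vector
// (preferring dedicated when requested or when size > preferredBlockSize / 2),
// then falls back to dedicated memory if block allocation fails.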
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;
    // If memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }
    // If memory is lazily allocated, it should always be dedicated.
    if(finalCreateInfo.usage == VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristic: allocate dedicated memory if requested size is greater than half of preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        return AllocateDedicatedMemory(
            size,
            suballocType,
            memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer,
            dedicatedBufferUsage,
            dedicatedImage,
            allocationCount,
            pAllocations);
    }

    VkResult res = blockVector->Allocate(
        m_CurrentFrameIndex.load(),
        size,
        alignment,
        finalCreateInfo,
        suballocType,
        allocationCount,
        pAllocations);
    if(res == VK_SUCCESS)
    {
        return res;
    }

    // Block allocation failed - try dedicated memory.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    res = AllocateDedicatedMemory(
        size,
        suballocType,
        memTypeIndex,
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
        finalCreateInfo.pUserData,
        dedicatedBuffer,
        dedicatedBufferUsage,
        dedicatedImage,
        allocationCount,
        pAllocations);
    if(res == VK_SUCCESS)
    {
        // Succeeded: AllocateDedicatedMemory already filled pAllocations.
        VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
        return VK_SUCCESS;
    }
    // Everything failed: return error code.
    VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
    return res;
}
// Allocates one or more dedicated VkDeviceMemory blocks (one per allocation),
// optionally checking the heap budget first and chaining
// VkMemoryDedicatedAllocateInfoKHR / VkMemoryAllocateFlagsInfoKHR as needed.
// On partial failure all pages created so far are freed again.
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

#if VMA_BUFFER_DEVICE_ADDRESS
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_UseKhrBufferDeviceAddress)
    {
        bool canContainBufferWithDeviceAddress = true;
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX || // Usage flags unknown
                (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = false;
        }
        if(canContainBufferWithDeviceAddress)
        {
            allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
            VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
        }
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size,
            suballocType,
            memTypeIndex,
            allocInfo,
            map,
            isUserDataString,
            pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register the new allocations in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all allocations created so far.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            // No need to call vkUnmapMemory: the Vulkan spec allows freeing memory
            // without unmapping it first.
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}

void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
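// AllocateMemory() is the central entry point: it validates the create flags,
// routes allocations with an explicit pool to that pool's block vector, and
// otherwise iterates over compatible memory types, trying AllocateMemoryOfType
// until one succeeds or no candidate type remains.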
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfoForPool,
            suballocType,
            allocationCount,
            pAllocations);
    }
    else
    {
        // Bit mask of memory Vulkan types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer,
                dedicatedBufferUsage,
                dedicatedImage,
                createInfo,
                memTypeIndex,
                suballocType,
                allocationCount,
                pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed. Try other compatible memory types.
            else
            {
                for(;;)
                {
                    // Remove old memTypeIndex from list of possibilities.
                    memoryTypeBits &= ~(1u << memTypeIndex);
                    // Find alternative memTypeIndex.
                    res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                    if(res == VK_SUCCESS)
                    {
                        alignmentForMemType = VMA_MAX(
                            vkMemReq.alignment,
                            GetMemoryTypeMinAlignment(memTypeIndex));

                        res = AllocateMemoryOfType(
                            vkMemReq.size,
                            alignmentForMemType,
                            requiresDedicatedAllocation || prefersDedicatedAllocation,
                            dedicatedBuffer,
                            dedicatedBufferUsage,
                            dedicatedImage,
                            createInfo,
                            memTypeIndex,
                            suballocType,
                            allocationCount,
                            pAllocations);
                        // Allocation from this alternative memory type succeeded.
                        if(res == VK_SUCCESS)
                        {
                            return res;
                        }
                        // else: Allocation from this memory type failed. Try next one - next loop iteration.
                    }
                    // No other matching memory type index could be found.
                    else
                    {
                        // No way to reach VK_SUCCESS, reporting error to user.
                        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                }
            }
        }
        else
        {
            return res;
        }
    }
}
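/*
How the fallback loop above narrows the candidate set - a worked example
(illustrative): suppose vkMemReq.memoryTypeBits == 0b1011 and the best-cost
type found is index 1. If AllocateMemoryOfType() fails for it,
`memoryTypeBits &= ~(1u << 1)` leaves 0b1001, and vmaFindMemoryTypeIndex() is
asked again, now choosing only between types 0 and 3. The loop terminates when
an allocation succeeds or no acceptable type remains.
*/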
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            // Do this regardless of whether the allocation is lost. Lost allocations still account to budget.
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}
VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and so it does nothing. It's left for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // Have to take MIN with heap size because explicit HeapSizeLimit is included in it.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics.
        }
    }
}
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats, info.flags);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}

VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}

VkResult VmaAllocator_T::DefragmentationPassBegin(
    VmaDefragmentationPassInfo* pInfo,
    VmaDefragmentationContext context)
{
    return context->DefragmentPassBegin(pInfo);
}

VkResult VmaAllocator_T::DefragmentationPassEnd(
    VmaDefragmentationContext context)
{
    return context->DefragmentPassEnd();
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        /*
        Warning: This is a carefully designed algorithm.
        Do not modify unless you really know what you're doing :)
        */
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif
        return true;
    }
}
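/*
Illustrative sketch of how lost allocations are consumed through the public API
(assumes an allocation created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
`allocator`, `alloc`, and `frameIndex` are placeholders):

    vmaSetCurrentFrameIndex(allocator, frameIndex);
    VmaAllocationInfo info;
    vmaGetAllocationInfo(allocator, alloc, &info);
    if(info.deviceMemory == VK_NULL_HANDLE)
    {
        // The allocation is lost - recreate the resource before using it.
    }
*/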
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    // Memory type index out of range or forbidden.
    if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() ||
        ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        (void)success;
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}

void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData);
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}
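/*
The compare_exchange_strong loop above is the standard lock-free pattern for
"add, but only if the result stays under a limit". A minimal standalone sketch
of the same idea (illustrative; not part of the library):

    #include <atomic>
    #include <cstdint>

    std::atomic<uint64_t> g_used{0};

    bool TryReserve(uint64_t bytes, uint64_t limit)
    {
        uint64_t cur = g_used.load();
        for(;;)
        {
            if(cur + bytes > limit)
                return false; // Would exceed the limit - refuse.
            // On failure, compare_exchange_strong refreshes `cur` with the
            // current value and the loop retries with up-to-date data.
            if(g_used.compare_exchange_strong(cur, cur + bytes))
                return true;
        }
    }
*/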
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}
void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    VkResult res = VK_SUCCESS;

    VkMappedMemoryRange memRange = {};
    if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
    {
        switch(op)
        {
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
    return res;
}
VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets, const VkDeviceSize* sizes,
    VMA_CACHE_OPERATION op)
{
    typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
    typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
    RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));

    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation alloc = allocations[allocIndex];
        const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
        const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
        VkMappedMemoryRange newRange;
        if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
        {
            ranges.push_back(newRange);
        }
    }

    VkResult res = VK_SUCCESS;
    if(!ranges.empty())
    {
        switch(op)
        {
        case VMA_CACHE_FLUSH:
            res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
            break;
        case VMA_CACHE_INVALIDATE:
            res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
    return res;
}
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        (void)success;
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    // There is no need to call vkUnmapMemory here, because the Vulkan spec allows
    // skipping vkUnmapMemory before vkFreeMemory.

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create buffer.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // Query for supported memory types.
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // Destroy buffer.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}
uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
{
    // Make sure memory information is already fetched.
    VMA_ASSERT(GetMemoryTypeCount() > 0);

    uint32_t memoryTypeBits = UINT32_MAX;

    if(!m_UseAmdDeviceCoherentMemory)
    {
        // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD.
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
            {
                memoryTypeBits &= ~(1u << memTypeIndex);
            }
        }
    }

    return memoryTypeBits;
}
bool VmaAllocator_T::GetFlushOrInvalidateRange(
    VmaAllocation allocation,
    VkDeviceSize offset, VkDeviceSize size,
    VkMappedMemoryRange& outRange) const
{
    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        const VkDeviceSize allocationSize = allocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
        outRange.pNext = VMA_NULL;
        outRange.memory = allocation->GetMemory();

        switch(allocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                outRange.size = allocationSize - outRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                outRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
                    allocationSize - outRange.offset);
            }
            break;
        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            // 1. Still within this allocation.
            outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                size = allocationSize - offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
            }
            outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);

            // 2. Adjust to whole block.
            const VkDeviceSize allocationOffset = allocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
            outRange.offset += allocationOffset;
            outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
            break;
        }
        default:
            VMA_ASSERT(0);
        }
        return true;
    }
    return false;
}
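/*
Worked example of the alignment math above (illustrative): with
nonCoherentAtomSize = 64, offset = 100, size = 200:

    outRange.offset = VmaAlignDown(100, 64)                    = 64
    outRange.size   = VmaAlignUp(200 + (100 - 64), 64)
                    = VmaAlignUp(236, 64)                      = 256

so the requested range [100, 300) grows to [64, 320), satisfying the Vulkan
requirement that VkMappedMemoryRange offset and size be multiples of
nonCoherentAtomSize (the size is additionally clamped to the end of the
allocation or block).
*/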
#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    VmaPnextChainPushFront(&memProps, &budgetProps);

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();

            // Some bugged drivers return the budget incorrectly, e.g. 0 or much bigger than heap size.
            if(m_Budget.m_VulkanBudget[heapIndex] == 0)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics.
            }
            else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
            }
            if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
            {
                m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
            }
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}
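/*
Minimal usage sketch (illustrative; `instance`, `physicalDevice`, and `device`
are assumed to be valid Vulkan handles created by the application):

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.instance = instance;
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;

    VmaAllocator allocator;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
    // ...use the allocator for the lifetime of the device...
    vmaDestroyAllocator(allocator);
*/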
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks; // Have to copy them before destroying the allocator.
        vma_delete(&allocationCallbacks, allocator);
    }
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo)
{
    VMA_ASSERT(allocator && pAllocatorInfo);
    pAllocatorInfo->instance = allocator->m_hInstance;
    pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice();
    pAllocatorInfo->device = allocator->m_hDevice;
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
                    {
                        json.WriteString("PROTECTED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_UNCACHED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    // Avoid DEVICE_COHERENT unless explicitly requested.
    if(((pAllocationCreateInfo->requiredFlags | pAllocationCreateInfo->preferredFlags) &
        (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
    {
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Calculate cost as number of bits from preferredFlags not present in this memory type.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
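/*
Minimal usage sketch (illustrative): find a memory type suitable for a staging
buffer. Passing UINT32_MAX as memoryTypeBits accepts any type the
implementation supports.

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex;
    VkResult res = vmaFindMemoryTypeIndex(
        allocator,
        UINT32_MAX,
        &allocCreateInfo,
        &memTypeIndex);
    // memTypeIndex can then be used e.g. in VmaPoolCreateInfo::memoryTypeIndex.
*/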
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}
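/*
Minimal custom pool sketch (illustrative; `memTypeIndex` as found with
vmaFindMemoryTypeIndex above):

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 64ull * 1024 * 1024; // Optional: fixed 64 MiB blocks.

    VmaPool pool;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
    // Pass the pool via VmaAllocationCreateInfo::pool when allocating from it.
    // ...
    vmaDestroyPool(allocator, pool);
*/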
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool && ppName);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
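/*
Minimal usage sketch (illustrative): allocate memory for externally queried
requirements, then bind it to a buffer created by the application. `device`
and `buffer` are placeholders.

    VkMemoryRequirements memReq;
    vkGetBufferMemoryRequirements(device, buffer, &memReq);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocation alloc;
    VkResult res = vmaAllocateMemory(allocator, &memReq, &allocCreateInfo, &alloc, VMA_NULL);
    if(res == VK_SUCCESS)
        res = vmaBindBufferMemory(allocator, alloc, buffer);
*/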
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}
VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}
VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(
    VmaAllocator allocator,
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets,
    const VkDeviceSize* sizes)
{
    VMA_ASSERT(allocator);

    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocations);

    VMA_DEBUG_LOG("vmaFlushAllocations");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        //TODO
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(
    VmaAllocator allocator,
    uint32_t allocationCount,
    VmaAllocation* allocations,
    const VkDeviceSize* offsets,
    const VkDeviceSize* sizes)
{
    VMA_ASSERT(allocator);

    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocations);

    VMA_DEBUG_LOG("vmaInvalidateAllocations");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        //TODO
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using the new one.

    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // Remaining members of info2 (GPU limits, command buffer) stay zero.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context,
    VmaDefragmentationPassInfo* pInfo)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(pInfo);

    VMA_DEBUG_LOG("vmaBeginDefragmentationPass");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        pInfo->moveCount = 0;
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassBegin(pInfo, context);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaEndDefragmentationPass");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassEnd(context);
}
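/*
Minimal CPU-side defragmentation sketch using the begin/end pair above
(illustrative; `allocs` and `allocCount` are placeholders; incremental passes
via vmaBeginDefragmentationPass are only needed when defragmenting on the GPU):

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = (uint32_t)allocCount;
    defragInfo.pAllocations = allocs;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext defragCtx;
    vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
    vmaDefragmentationEnd(allocator, defragCtx);
    // Buffers/images bound to moved allocations must be recreated and rebound.
*/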
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
        !allocator->m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            pBufferCreateInfo->usage, // dedicatedBufferUsage
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
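/*
Minimal usage sketch (illustrative):

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buf;
    VmaAllocation alloc;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, VMA_NULL);
    // ...
    vmaDestroyBuffer(allocator, buf, alloc);
*/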
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            UINT32_MAX, // dedicatedBufferUsage
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
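/*
Minimal usage sketch (illustrative):

    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage img;
    VmaAllocation alloc;
    VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, VMA_NULL);
    // ...
    vmaDestroyImage(allocator, img, alloc);
*/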
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
#endif // #ifdef VMA_IMPLEMENTATION