#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H
#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif
#if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
    extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
    extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
    extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    extern PFN_vkAllocateMemory vkAllocateMemory;
    extern PFN_vkFreeMemory vkFreeMemory;
    extern PFN_vkMapMemory vkMapMemory;
    extern PFN_vkUnmapMemory vkUnmapMemory;
    extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
    extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
    extern PFN_vkBindBufferMemory vkBindBufferMemory;
    extern PFN_vkBindImageMemory vkBindImageMemory;
    extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    extern PFN_vkCreateBuffer vkCreateBuffer;
    extern PFN_vkDestroyBuffer vkDestroyBuffer;
    extern PFN_vkCreateImage vkCreateImage;
    extern PFN_vkDestroyImage vkDestroyImage;
    extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
    #if VMA_VULKAN_VERSION >= 1001000
        extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
        extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
        extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
        extern PFN_vkBindImageMemory2 vkBindImageMemory2;
        extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
    #endif // #if VMA_VULKAN_VERSION >= 1001000
#endif // #if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS
#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #if defined(_WIN32)
        #include <windows.h>
    #else
        #error VMA Recording functionality is not yet available for non-Windows platforms
    #endif
#endif
#if !defined(VMA_VULKAN_VERSION)
    #if defined(VK_VERSION_1_2)
        #define VMA_VULKAN_VERSION 1002000
    #elif defined(VK_VERSION_1_1)
        #define VMA_VULKAN_VERSION 1001000
    #else
        #define VMA_VULKAN_VERSION 1000000
    #endif
#endif
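// Usage sketch (illustrative, not part of the original source): any of the
// VMA_* configuration macros in this section can be overridden by defining
// them before this header is included, e.g. to force the Vulkan 1.0 code
// paths even when newer Vulkan headers are present:
//
//     #define VMA_VULKAN_VERSION 1000000
//     #include "vk_mem_alloc.h"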
#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif

#if !defined(VMA_BUFFER_DEVICE_ADDRESS)
    #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
        #define VMA_BUFFER_DEVICE_ADDRESS 1
    #else
        #define VMA_BUFFER_DEVICE_ADDRESS 0
    #endif
#endif
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif

#ifndef VMA_LEN_IF_NOT_NULL
    #define VMA_LEN_IF_NOT_NULL(len)
#endif

#ifndef VMA_NULLABLE
    #ifdef __clang__
        #define VMA_NULLABLE _Nullable
    #else
        #define VMA_NULLABLE
    #endif
#endif

#ifndef VMA_NOT_NULL
    #ifdef __clang__
        #define VMA_NOT_NULL _Nonnull
    #else
        #define VMA_NOT_NULL
    #endif
#endif

#ifndef VMA_NOT_NULL_NON_DISPATCHABLE
    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
        #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL
    #else
        #define VMA_NOT_NULL_NON_DISPATCHABLE
    #endif
#endif

#ifndef VMA_NULLABLE_NON_DISPATCHABLE
    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
        #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE
    #else
        #define VMA_NULLABLE_NON_DISPATCHABLE
    #endif
#endif
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryType,
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
    VkDeviceSize size,
    void* VMA_NULLABLE pUserData);

/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryType,
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory,
    VkDeviceSize size,
    void* VMA_NULLABLE pUserData);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pHeapSizeLimit;
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkPhysicalDeviceProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceMemoryProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE * VMA_NOT_NULL ppPhysicalDeviceMemoryProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t frameIndex);
#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as a null-terminated string in JSON format.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator VMA_NOT_NULL allocator,
    char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator VMA_NOT_NULL allocator,
    char* VMA_NULLABLE pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator VMA_NOT_NULL allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    uint32_t* VMA_NOT_NULL pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator VMA_NOT_NULL allocator,
    const VmaPoolCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaPool VMA_NULLABLE * VMA_NOT_NULL pPool);

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    size_t* VMA_NULLABLE pLostAllocationCount);

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    const char* VMA_NULLABLE * VMA_NOT_NULL ppName);

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaPool VMA_NOT_NULL pool,
    const char* VMA_NULLABLE pName);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    size_t allocationCount,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
    VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator VMA_NOT_NULL allocator,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator VMA_NOT_NULL allocator,
    size_t allocationCount,
    const VmaAllocation VMA_NULLABLE * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize newSize);

VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    void* VMA_NULLABLE pUserData);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    void* VMA_NULLABLE * VMA_NOT_NULL ppData);

VMA_CALL_PRE void VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize offset,
    VkDeviceSize size);

VMA_CALL_PRE void VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize offset,
    VkDeviceSize size);

    // From struct VmaDefragmentationInfo2:
    const VmaPool VMA_NOT_NULL * VMA_NULLABLE VMA_LEN_IF_NOT_NULL(poolCount) pPools;

    // From struct VmaDefragmentationPassMoveInfo:
    VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory;

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator VMA_NOT_NULL allocator,
    const VmaAllocation VMA_NOT_NULL * VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
    size_t allocationCount,
    VkBool32* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationsChanged,
    const VmaDefragmentationInfo* VMA_NULLABLE pDefragmentationInfo,
    VmaDefragmentationStats* VMA_NULLABLE pDefragmentationStats);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
    const void* VMA_NULLABLE pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator VMA_NOT_NULL allocator,
    VmaAllocation VMA_NOT_NULL allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
    const void* VMA_NULLABLE pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator VMA_NOT_NULL allocator,
    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
    VmaAllocation VMA_NULLABLE allocation);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator VMA_NOT_NULL allocator,
    const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
    VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
    VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator VMA_NOT_NULL allocator,
    VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
    VmaAllocation VMA_NULLABLE allocation);

#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION
#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

#if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
    #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
#endif

// Define this macro to 1 to make the library use STL containers instead of its own implementation.
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif
#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus
    // Otherwise it is always 199711L, despite shared_mutex being available since Visual Studio 2015 Update 2.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif
#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <algorithm>

#define VMA_NULL nullptr
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
#ifndef VMA_ASSERT
    #ifdef NDEBUG
        #define VMA_ASSERT(expr)
    #else
        #define VMA_ASSERT(expr) assert(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef NDEBUG
        #define VMA_HEAVY_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #endif
#endif
#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif
#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif
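// A sketch of plugging in a custom log (illustrative, not part of the
// original source) - the macro receives printf-style arguments, so a simple
// override defined before the implementation is compiled could be:
//
//     #define VMA_DEBUG_LOG(format, ...) do { \
//         printf(format, __VA_ARGS__); \
//         printf("\n"); \
//     } while(false)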
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
        bool TryLock() { return m_Mutex.try_lock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            bool TryLockRead() { return m_Mutex.try_lock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
            bool TryLockWrite() { return m_Mutex.try_lock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        // Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
            bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            bool TryLockRead() { return m_Mutex.TryLock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
            bool TryLockWrite() { return m_Mutex.TryLock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif
#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    /// Define to 1 to make every allocation use its own dedicated VkDeviceMemory, for debugging.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    /// Minimum alignment of all allocations, in bytes.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    /// Minimum margin before and after every allocation, in bytes.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    /// Define to 1 to automatically fill new allocations and destroyed allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    /// Define to 1, together with a nonzero VMA_DEBUG_MARGIN, to write a magic value to the
    /// margins and validate it, so that memory corruptions (out-of-bounds writes) are detected.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    /// Define to 1 to enable a single mutex protecting all entry calls to the library, for debugging.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    /// Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity, for debugging.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif
#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif
static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

// Copies of some Vulkan definitions so we don't need to check their existence just to handle few constants.
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
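// The function above is a branch-free SWAR ("SIMD within a register")
// population count: each step sums adjacent groups of bits - first 1-bit
// pairs, then 2-bit groups, then nibbles, bytes and halfwords - until the
// low bits of the word hold the total. A small illustrative check
// (not part of the original source):
//
//     VMA_ASSERT(VmaCountBitsSet(0xF0F0F0F0) == 16);
//     VMA_ASSERT(VmaCountBitsSet(0) == 0);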
// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns true if given number is a power of two.
// T must be unsigned integer number or signed integer but always nonnegative.
// For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}
#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
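// VmaQuickSortPartition above implements the Lomuto partition scheme with the
// last element as pivot; when this block is active (VMA_SORT not defined
// earlier), VMA_SORT therefore becomes a plain recursive quicksort. A usage
// sketch (illustrative, not part of the original source):
//
//     uint32_t sizes[] = { 42, 7, 19 };
//     VMA_SORT(sizes, sizes + 3, [](uint32_t a, uint32_t b) { return a < b; });
//     // sizes is now { 7, 19, 42 }.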
// Returns true if two memory blocks occupy the same VkDeviceMemory page.
// resourceA must be in less memory offset than resourceB.
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
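// Worked example (illustrative, not part of the original source): pageSize is
// assumed to be a power of two, so ~(pageSize - 1) masks an offset down to the
// start of its page. With pageSize = 4096, a resource A spanning [4000, 5000)
// ends at byte 4999, whose page starts at 4096; a resource B starting at
// offset 6000 also lies on the page starting at 4096, so the function returns
// true and the two resources may need bufferImageGranularity separation.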
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
// Returns true if given suballocation types could conflict and must respect
// VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
// or linear image and the other one is optimal image.
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#else
    // no-op
#endif
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
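// Configuration sketch (illustrative, not part of the original source): the
// magic-value machinery above only activates when both macros below are
// nonzero before the implementation is compiled. Each allocation then gets
// VMA_DEBUG_MARGIN bytes before and after it filled with
// VMA_CORRUPTION_DETECTION_MAGIC_VALUE, which VmaValidateMagicValue re-checks
// later to detect out-of-bounds writes:
//
//     #define VMA_DEBUG_MARGIN 16
//     #define VMA_DEBUG_DETECT_CORRUPTION 1
//     #define VMA_IMPLEMENTATION
//     #include "vk_mem_alloc.h"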
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; // Example size.
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
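// Usage sketch (illustrative, not part of the original source): the guards
// are scope-based, so early returns and exceptions cannot leave a mutex
// locked; passing useMutex = false turns the guard into a no-op:
//
//     void Example(VMA_RW_MUTEX& mutex, bool useMutex)
//     {
//         VmaMutexLockRead lock(mutex, useMutex); // LockRead() here.
//         // ... read shared state ...
//     } // UnlockRead() in the destructor.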
#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection or place where
new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
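// Usage sketch (illustrative, not part of the original source): with a
// strict-weak-ordering functor these behave like std::lower_bound and a
// find-if-present variant of std::binary_search over a sorted range:
//
//     struct IntLess { bool operator()(int a, int b) const { return a < b; } };
//     int arr[] = { 1, 3, 5 };
//     int* p = VmaBinaryFindFirstNotLess(arr, arr + 3, 4, IntLess()); // -> &arr[2]
//     int* q = VmaBinaryFindSorted(arr, arr + 3, 3, IntLess());      // -> &arr[1]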
/*
Returns true if all pointers in the array are not-null and unique.
Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
T must be pointer type, e.g. VmaAllocation, VmaPool.
*/
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
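// Note on the pattern above (illustrative, not part of the original source):
// vma_new obtains raw storage via VmaAllocate (which honors user-provided
// VkAllocationCallbacks) and constructs the object in place with
// placement-new; vma_delete reverses it with an explicit destructor call
// followed by VmaFree. A usage sketch:
//
//     VmaMutex* m = vma_new(pAllocationCallbacks, VmaMutex)();
//     vma_delete(pAllocationCallbacks, m);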
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}

static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
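// Usage sketch (illustrative, not part of the original source): the class
// satisfies the minimal STL allocator interface, so element storage of the
// containers below is routed through the same VkAllocationCallbacks as every
// other CPU-side allocation made by the library:
//
//     VmaStlAllocator<int> alloc(pAllocationCallbacks);
//     VmaVector<int, VmaStlAllocator<int> > v(alloc);
//     v.push_back(42);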
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR
/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
    // value is unused.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    template<typename... Types> T* Alloc(Types... args);
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
            return result;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
    return result;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            ptr->~T(); // Explicit destructor call.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
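// Design note (illustrative, not part of the original source): the free list
// is threaded through the unused slots themselves - each free Item reuses its
// own storage (the union above) to hold the index of the next free slot, so
// no side table is needed and Alloc() is O(1) amortized, while Free() pays a
// linear search over blocks to locate the owner. A usage sketch:
//
//     VmaPoolAllocator<VmaSuballocation> pool(pAllocationCallbacks, 128);
//     VmaSuballocation* s = pool.Alloc(); // value-initialized via placement-new
//     pool.Free(s);                       // slot returns to the block's free list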
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST
template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}
template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}
template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };
    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };
    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
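// Design note (illustrative, not part of the original source): unlike
// std::unordered_map, this fallback VmaMap keeps its pairs in a VmaVector
// sorted by key, so find() is a binary search (O(log n)) and insert() pays
// O(n) for the shifting memmove - a reasonable trade-off for the small maps
// used internally. A usage sketch:
//
//     VmaMap<uint32_t, uint32_t> map(VmaStlAllocator<VmaPair<uint32_t, uint32_t> >(pAllocationCallbacks));
//     map.insert(VmaPair<uint32_t, uint32_t>(1, 100));
//     VmaPair<uint32_t, uint32_t>* it = map.find(1); // it->second == 100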
class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment{1},
        m_Size{0},
        m_pUserData{VMA_NULL},
        m_LastUseFrameIndex{currentFrameIndex},
        m_MemoryTypeIndex{0},
        m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
        m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
        m_MapCount{0},
        m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
    {
#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }
    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }
    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }
    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }

    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif
private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData;
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif

    void FreeUserDataString(VmaAllocator hAllocator);
};
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as
allocated memory block or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparators for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

// Parameters of planned allocation inside a VmaDeviceMemoryBlock.
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;

    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request must already be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};
#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    ////////////////////////////////////////////////////////////////////////////////
    // For defragmentation

    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        // Suballocations in the 2nd vector are created later than the ones in the 1st,
        // but they all have smaller offsets.
        SECOND_VECTOR_RING_BUFFER,
        // Suballocations in the 2nd vector are the upper side of a double stack.
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        VmaAllocationRequest* pAllocationRequest);
};
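// Illustrative sketch (not part of the library): how the linear metadata's two
// suballocation vectors behave in SECOND_VECTOR_DOUBLE_STACK mode. The names
// below (SimpleDoubleStack, PushLower, PushUpper) are hypothetical; they only
// mirror the idea that AccessSuballocations1st/2nd grow from opposite ends of
// the same block until they would collide.
#if 0
#include <cstdint>
#include <vector>

struct SimpleDoubleStack
{
    uint64_t capacity;
    std::vector<uint64_t> lowerOffsets; // analogous to the 1st vector, grows up from 0
    std::vector<uint64_t> upperOffsets; // analogous to the 2nd vector, grows down from capacity
    uint64_t lowerTop;
    uint64_t upperBottom; // equals capacity when the upper side is empty

    explicit SimpleDoubleStack(uint64_t cap) : capacity(cap), lowerTop(0), upperBottom(cap) {}

    // Returns false when the two ends would collide - no free space left between them.
    bool PushLower(uint64_t size)
    {
        if(lowerTop + size > upperBottom) return false;
        lowerOffsets.push_back(lowerTop);
        lowerTop += size;
        return true;
    }
    bool PushUpper(uint64_t size)
    {
        if(upperBottom < size || upperBottom - size < lowerTop) return false;
        upperBottom -= size;
        upperOffsets.push_back(upperBottom);
        return true;
    }
};
#endif // illustration only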
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        // ... (node type and tree/list links elided)
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    struct
    {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // Alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
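// Illustrative sketch (not part of the library): the buddy allocator's level
// arithmetic. LevelToNodeSize above halves the usable size per level
// (m_UsableSize >> level); a matching search finds the deepest level whose
// node size still fits the request. The stand-alone helper below is a
// hypothetical rendering of that relationship.
#if 0
#include <cstdint>

// Deepest level whose node size (usableSize >> level) still holds allocSize.
static uint32_t AllocSizeToLevelSketch(uint64_t usableSize, uint32_t levelCount, uint64_t allocSize)
{
    uint32_t level = 0;
    uint64_t nodeSize = usableSize;
    while(level + 1 < levelCount && (nodeSize >> 1) >= allocSize)
    {
        nodeSize >>= 1;
        ++level;
    }
    return level;
}
// Example: usableSize = 1024, levelCount = 6, allocSize = 200
//   -> level 2 (node size 256), since level-3 nodes (128 bytes) are too small.
#endif // illustration only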
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        /* ... */
        VkDeviceSize allocationLocalOffset,
        /* ... */);
    VkResult BindImageMemory(
        /* ... */
        VkDeviceSize allocationLocalOffset,
        /* ... */);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    uint32_t m_MapCount;
    void* m_pMappedData;
};
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    // ... (size and allocation handle elided)
    VmaDeviceMemoryBlock* pSrcBlock;
    VmaDeviceMemoryBlock* pDstBlock;
};

class VmaDefragmentationAlgorithm;
/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for one
Vulkan memory type. Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    // Saves results in pCtx->res.
    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        /* ... */
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx /* ... */);

    uint32_t ProcessDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx /* ... */);

    void CommitDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx /* ... */);

    ////////////////////////////
    // To be used only while the m_Mutex is locked. Used during defragmentation.

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    // There can be at most one allocation that is completely empty - a
    // hysteresis to avoid alternating creation and destruction of VkDeviceMemory.
    bool m_HasEmptyBlock;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        /* ... */
        VkDeviceSize alignment,
        /* ... */
        VmaSuballocationType suballocType,
        /* ... */);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        /* ... */
        VkDeviceSize alignment,
        /* ... */
        VmaSuballocationType suballocType,
        /* ... */);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    void UpdateHasEmptyBlock();
};
struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        /* ... */
        VkDeviceSize preferredBlockSize);
    ~VmaPool_T();

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
    // ...
#endif
};
/*
Performs defragmentation: updates block metadata and allocation offsets.
Does not move actual data, only bookkeeping.
*/
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove /* ... */) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove /* ... */);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        bool freeOldAllocations);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
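// Illustrative sketch (not part of the library): the ordering implemented by
// BlockInfoCompareMoveDestination above, restated with std::sort over a plain
// struct. Blocks that contain non-movable allocations are preferred as move
// destinations, then blocks with less free space, which packs data toward
// already-fuller blocks first.
#if 0
#include <algorithm>
#include <cstdint>
#include <vector>

struct BlockSketch { bool hasNonMovable; uint64_t sumFreeSize; };

static void SortAsMoveDestinations(std::vector<BlockSketch>& blocks)
{
    std::sort(blocks.begin(), blocks.end(),
        [](const BlockSketch& lhs, const BlockSketch& rhs)
        {
            if(lhs.hasNonMovable != rhs.hasNonMovable)
                return lhs.hasNonMovable;             // non-movable blocks first
            return lhs.sumFreeSize < rhs.sumFreeSize; // then least free space first
        });
}
#endif // illustration only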
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove /* ... */);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    // Leave this structure behind to remember remaining free space.
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }
                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
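// Illustrative usage (hypothetical driver code, for exposition only): how the
// fast defragmentation algorithm's FreeSpaceDatabase is meant to be driven.
// Register() remembers up to MAX_COUNT free ranges; Fetch() picks the one that
// leaves the most space after an aligned placement, then shrinks or
// invalidates it.
#if 0
void FreeSpaceDatabaseSketch()
{
    FreeSpaceDatabase db;
    db.Register(/*blockInfoIndex=*/0, /*offset=*/256, /*size=*/4096);

    size_t blockIndex = 0;
    VkDeviceSize dstOffset = 0;
    // Request 1000 bytes aligned to 512: the placement lands at offset 512,
    // and with the default registration threshold the remaining tail
    // (1512 .. 4352) stays registered for later fetches.
    if(db.Fetch(/*alignment=*/512, /*size=*/1000, blockIndex, dstOffset))
    {
        // dstOffset == 512 here.
    }
}
#endif // illustration only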
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    // ...
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
    uint32_t defragmentationMovesProcessed;
    uint32_t defragmentationMovesCommitted;
    bool hasDefragmentationPlan;

    VmaBlockVectorDefragmentationContext(
        /* ... */
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

private:
    // ...
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    // Used between constructor and Begin.
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        /* ... */
        uint32_t currFrameIndex,
        /* ... */);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, const VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        const VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        /* ... */);

    VkResult DefragmentPassEnd();

private:
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;

    VkDeviceSize m_MaxCpuBytesToMove;
    uint32_t m_MaxCpuAllocationsToMove;
    VkDeviceSize m_MaxGpuBytesToMove;
    uint32_t m_MaxGpuAllocationsToMove;

    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    // ...
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled,
        bool deviceCoherentMemoryExtensionEnabled);

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex /* ... */);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq /* ... */);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        uint64_t allocationCount /* ... */);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation /* ... */);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation /* ... */);
    void RecordFreeMemory(uint32_t frameIndex /* ... */);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount /* ... */);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        /* ... */
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex /* ... */);
    void RecordMapMemory(uint32_t frameIndex /* ... */);
    void RecordUnmapMemory(uint32_t frameIndex /* ... */);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo /* ... */);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo /* ... */);
    void RecordDestroyBuffer(uint32_t frameIndex /* ... */);
    void RecordDestroyImage(uint32_t frameIndex /* ... */);
    void RecordTouchAllocation(uint32_t frameIndex /* ... */);
    void RecordGetAllocationInfo(uint32_t frameIndex /* ... */);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex /* ... */);
    void RecordDefragmentationBegin(uint32_t frameIndex /* ... */);
    void RecordDefragmentationEnd(uint32_t frameIndex /* ... */);
    void RecordSetPoolName(uint32_t frameIndex /* ... */);

private:
    // ...
    class UserDataString
    {
    public:
        // ...
        const char* GetString() const { return m_Str; }
    private:
        // ...
        const char* m_Str;
    };

    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    // ...
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }
    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
    // ...
};

#endif // #if VMA_RECORDING_ENABLED
// Thread-safe wrapper over a pool allocator, used for creating VmaAllocation_T objects.
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    template<typename... Types> VmaAllocation Allocate(Types... args);
    // ...

private:
    // ...
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
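// Illustrative sketch (not part of the library): the same per-heap accounting
// idea restated with std::atomic. Block bytes and allocation bytes are tracked
// separately, so their difference is memory that was allocated from Vulkan but
// is not currently occupied by any allocation.
#if 0
#include <atomic>
#include <cstdint>

struct BudgetSketch
{
    std::atomic<uint64_t> blockBytes{0};      // total VkDeviceMemory allocated for a heap
    std::atomic<uint64_t> allocationBytes{0}; // bytes handed out to live allocations

    void AddAllocation(uint64_t size) { allocationBytes += size; }
    void RemoveAllocation(uint64_t size) { allocationBytes -= size; }
    uint64_t UnusedBytes() const { return blockBytes - allocationBytes; }
};
#endif // illustration only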
// Main allocator object.
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation;
    bool m_UseKhrBindMemory2;
    bool m_UseExtMemoryBudget;
    bool m_UseAmdDeviceCoherentMemory;
    bool m_UseKhrBufferDeviceAddress;
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap, so it
    // cannot allocate more than the heap size.
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }
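    // Worked example (illustrative): on a device reporting nonCoherentAtomSize
    // = 64 with the default VMA_DEBUG_ALIGNMENT of 1, a HOST_VISIBLE but not
    // HOST_COHERENT memory type yields GetMemoryTypeMinAlignment() == 64, so
    // mapped ranges can be flushed/invalidated on whole atoms; a coherent type
    // yields just VMA_DEBUG_ALIGNMENT.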
    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

    uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        /* ... */);
    VkResult DefragmentationEnd(
        /* ... */);
    VkResult DefragmentationPassBegin(
        /* ... */);
    VkResult DefragmentationPassEnd(
        /* ... */);

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        /* ... */);
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        /* ... */);

    VkResult BindBufferMemory(
        /* ... */
        VkDeviceSize allocationLocalOffset,
        /* ... */);
    VkResult BindImageMemory(
        /* ... */
        VkDeviceSize allocationLocalOffset,
        /* ... */);

    void FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    uint32_t GetGpuDefragmentationMemoryTypeBits();
private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

    // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types.
    uint32_t m_GlobalMemoryTypeBits;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    void ImportVulkanFunctions_Static();
#endif

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    void ImportVulkanFunctions_Dynamic();
#endif

    void ValidateVulkanFunctions();

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        /* ... */
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        /* ... */
        bool isUserDataString,
        /* ... */);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        /* ... */
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        /* ... */
        bool isUserDataString,
        /* ... */
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        size_t allocationCount,
        /* ... */);

    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

    uint32_t CalculateGlobalMemoryTypeBits() const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
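// Usage note (illustrative): vma_delete mirrors what `delete` does for an
// object placement-constructed on custom-allocated memory - run the destructor
// explicitly, then return the bytes to the owning allocator. A hypothetical
// call site, assuming `obj` was constructed in memory from VmaAllocate<T>:
#if 0
// T* obj = new(VmaAllocate<T>(hAllocator)) T(/* ctor args */);
// ...
// vma_delete(hAllocator, obj);
#endif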
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char* p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char* p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
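// Illustrative sketch (not part of the library): the digit loop used by
// AddNumber above. Digits are written right-to-left into a small buffer so no
// reversal pass is needed; the buffer only has to be large enough for the
// widest value plus a terminator (21 chars covers uint64_t).
#if 0
#include <cstdio>

static void DigitLoopDemo(unsigned long long num)
{
    char buf[21];
    buf[20] = '\0';
    char* p = &buf[20];
    do { *--p = (char)('0' + (num % 10)); num /= 10; } while(num);
    printf("%s\n", p); // e.g. 1234 -> "1234"
}
#endif // illustration only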
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";
VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        const char ch = pStr[i];
        if(ch >= 32)
        {
            // ... (escaping of '"' and '\\' elided)
            m_SB.Add(ch);
        }
        else
        {
            // ... (named escapes for control characters elided)
            VMA_ASSERT(0 && "Character not currently supported.");
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            // Even positions inside an object must be member names.
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
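// Illustrative usage (hypothetical driver code, not part of the library): the
// writer emits JSON into a VmaStringBuilder, asserting in BeginValue that
// object members alternate name/value.
#if 0
void JsonWriterSketch(VmaAllocator allocator)
{
    VmaStringBuilder sb(allocator);
    VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);

    json.BeginObject();
    json.WriteString("Size");   // member name...
    json.WriteNumber(1024u);    // ...then its value
    json.WriteString("Mapped");
    json.WriteBool(false);
    json.EndObject();
    // sb.GetData() now holds: {\n  "Size": 1024,\n  "Mapped": false\n}
}
#endif // illustration only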
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
        {
            ++mapRefCount;
        }
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        return VMA_NULL;
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            // Still in use within the last frameInUseCount frames - cannot become lost.
            return false;
        }
        else
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                return true;
            }
        }
    }
}
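// Illustrative sketch (not part of the library): the lost-allocation test
// applied in MakeLost above. An allocation may become lost only when its last
// use is more than frameInUseCount frames behind the current frame.
#if 0
static bool CanBecomeLostNow(uint32_t lastUseFrame, uint32_t frameInUseCount, uint32_t currentFrame)
{
    // e.g. lastUseFrame = 10, frameInUseCount = 2, currentFrame = 13 -> true (10 + 2 < 13)
    return lastUseFrame + frameInUseCount < currentFrame;
}
#endif // illustration only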
#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}

void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};

////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata

VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}
void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
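// Illustrative output shape (abbreviated, hypothetical values) produced by the
// PrintDetailedMap_* helpers above:
//
//   {
//     "TotalBytes": 268435456,
//     "UnusedBytes": 268434456,
//     "Allocations": 1,
//     "UnusedRanges": 1,
//     "Suballocations": [
//       { "Offset": 0, "Type": "BUFFER", "Size": 1000, ... },
//       { "Offset": 1000, "Type": "FREE", "Size": 268434456 }
//     ]
//   }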
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Generic

VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    // Start with a single free suballocation spanning the whole block.
    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}
bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of the next suballocation, as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    uint32_t calculatedFreeCount = 0;
    VkDeviceSize calculatedSumFreeSize = 0;
    // Number of free suballocations that should be registered in m_FreeSuballocationsBySize.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid; they should have been merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }
            // Margin required between allocations - every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
            // Margin required between allocations - the previous one must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize must match.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
        // Only free suballocations can be registered, sorted by size ascending.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(suballocItem->size >= lastSize);
        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}
VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    return 0;
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}

void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    // ... (counts and byte totals derived from rangeCount, m_FreeCount and m_SumFreeSize elided)

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            // ... (updates allocation size min/max)
        }
        else
        {
            // ... (updates unused range size min/max)
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    // ... (unused size and range counters elided)
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request:
    // early return false.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    /* ... */
                    bufferImageGranularity,
                    /* ... */
                    m_FreeSuballocationsBySize[index],
                    /* ... */
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    /* ... */
                    bufferImageGranularity,
                    /* ... */
                    it,
                    /* ... */
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search starting from the biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    /* ... */
                    bufferImageGranularity,
                    /* ... */
                    m_FreeSuballocationsBySize[index],
                    /* ... */
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force search over all suballocations, including ones that could become lost.
        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    /* ... */
                    bufferImageGranularity,
                    /* ... */
                    suballocIt,
                    /* ... */
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }
        return found;
    }

    return false;
}
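// Illustrative sketch (not part of the library): the best-fit search above in
// standard-library terms. m_FreeSuballocationsBySize is kept sorted by size,
// so the first entry whose size is not less than the needed size can be found
// with a binary search - the role VmaBinaryFindFirstNotLess plays over the
// raw array.
#if 0
#include <algorithm>
#include <cstdint>
#include <vector>

static size_t FindBestFit(const std::vector<uint64_t>& freeSizesSorted, uint64_t neededSize)
{
    auto it = std::lower_bound(freeSizesSorted.begin(), freeSizesSorted.end(), neededSize);
    return it == freeSizesSorted.end() ? SIZE_MAX : (size_t)(it - freeSizesSorted.begin());
}
// Example: sizes {64, 256, 4096}, neededSize 200 -> index 1 (the 256-byte range).
#endif // illustration only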
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}

uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert a new free suballocation after this one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert a new free suballocation before this one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}

void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that we have final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. *pOffset is already filled.
    return true;
}
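/*
Editor's note, with a hypothetical worked example: bufferImageGranularity is the
Vulkan device limit that forbids a linear resource (buffer) and an OPTIMAL-tiling
image from sharing the same "page" of a VkDeviceMemory block. Suppose the
granularity is 1024 and an OPTIMAL image ends at offset 3000 (its last byte is in
page [2048, 3072)). A buffer placed at offset 3050 would land on that same page,
so CheckAllocation() bumps *pOffset with VmaAlignUp(*pOffset,
bufferImageGranularity), here to 3072, the start of the next page, before
re-checking the fit. VmaBlocksOnSamePage() is exactly this page test.
*/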
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}
VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
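/*
Editor's note: FreeSuballocation() keeps the free list maximally coalesced. A
freed range is merged with an adjacent free neighbor on either side, so no two
consecutive VMA_SUBALLOCATION_TYPE_FREE items can survive in m_Suballocations.
That invariant is what allows MergeFreeWithNext() above to assert that both
items being merged are free.
*/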
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }
}
void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty())
    {
        return false;
    }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it)
    {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
        {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
            {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}
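/*
Editor's summary of the linear metadata below (restating the library's own
documentation, not new behavior): VmaBlockMetadata_Linear keeps suballocations in
two sorted vectors instead of a list, and cycles through three modes.

  - SECOND_VECTOR_EMPTY: only the 1st vector is used; allocations grow from the
    start of the block toward the end (a simple stack).
  - SECOND_VECTOR_RING_BUFFER: when allocations are freed from the front, new
    ones wrap around and are appended to the 2nd vector, which occupies the space
    before the 1st vector's first live item.
  - SECOND_VECTOR_DOUBLE_STACK: upper-address allocations grow from the end of
    the block downward in the 2nd vector, meeting the 1st vector in the middle.

Freed items are only marked null (hAllocation == VK_NULL_HANDLE) and counted in
m_1stNullItemsBeginCount / m_1stNullItemsMiddleCount / m_2ndNullItemsCount until
CleanupAfterFree() compacts or swaps the vectors.
*/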
VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_SumFreeSize(0),
    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_1stVectorIndex(0),
    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
    m_1stNullItemsBeginCount(0),
    m_1stNullItemsMiddleCount(0),
    m_2ndNullItemsCount(0)
{
}

VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
{
}

void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}
bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty())
    {
        // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty())
    {
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i)
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree)
        {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else
        {
            ++nullItem1stCount;
        }

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; )
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}
VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();

    // Gaps left by freed allocations inside the vectors are not counted, because
    // the linear allocator cannot reuse them. Only space available for new
    // allocations is considered.
    if(IsEmpty())
    {
        return size;
    }

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode)
    {
    case SECOND_VECTOR_EMPTY:
        // Available space is after end of 1st, as well as before beginning of 1st (which would make it a ring buffer).
        {
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }

    case SECOND_VECTOR_RING_BUFFER:
        // Available space is only between end of 2nd and beginning of 1st.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }

    case SECOND_VECTOR_DOUBLE_STACK:
        // Available space is only between end of 1st and top of 2nd.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }

    default:
        VMA_ASSERT(0);
        return 0;
    }
}
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    outInfo.blockCount = 1;
    outInfo.allocationCount = (uint32_t)GetAllocationCount();
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.unusedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                // Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        // Found non-null allocation.
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            // Process free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }

            // Process this allocation.
            outInfo.usedBytes += suballoc.size;
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

            // Prepare for next iteration.
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        // We are at the end.
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }

            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                // Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                lastOffset = size;
            }
        }
    }
}
void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        // Scan the 2nd vector from its beginning. (Previously this was initialized
        // with m_1stNullItemsBeginCount, which indexes the 1st vector.)
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // Process this allocation.
                ++inoutStats.allocationCount;

                // Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        // Found non-null allocation.
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            // Process free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            // Process this allocation.
            ++inoutStats.allocationCount;

            // Prepare for next iteration.
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        // We are at the end.
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // Process this allocation.
                ++inoutStats.allocationCount;

                // Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                lastOffset = size;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: count allocations and unused ranges.

    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;

    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // Count free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }

                // Count this allocation.
                ++alloc2ndCount;
                usedBytes += suballoc.size;

                // Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    ++unusedRangeCount;
                }

                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        // Found non-null allocation.
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            // Count free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                ++unusedRangeCount;
            }

            // Count this allocation.
            ++alloc1stCount;
            usedBytes += suballoc.size;

            // Prepare for next iteration.
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        // We are at the end.
        else
        {
            if(lastOffset < size)
            {
                ++unusedRangeCount;
            }

            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // Count free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }

                // Count this allocation.
                ++alloc2ndCount;
                usedBytes += suballoc.size;

                // Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < size)
                {
                    ++unusedRangeCount;
                }

                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);

    // SECOND PASS: same traversal, now emitting JSON.
    lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // Print free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                // Print this allocation.
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                // Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        // Found non-null allocation.
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            // Print free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }

            // Print this allocation.
            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

            // Prepare for next iteration.
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        // We are at the end.
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }

            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // Print free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                // Print this allocation.
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                // Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());
    return upperAddress ?
        CreateAllocationRequest_UpperAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
        CreateAllocationRequest_LowerAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}
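/*
Editor's note: a minimal usage sketch for the code path above, assuming an
existing VmaAllocator and a memory type index found with
vmaFindMemoryTypeIndex(). Creating a custom pool with
VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT routes its blocks to
VmaBlockMetadata_Linear; VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT then selects
the CreateAllocationRequest_UpperAddress() branch (double stack). Variable names
here are illustrative only.

    VmaPoolCreateInfo poolInfo = {};
    poolInfo.memoryTypeIndex = memTypeIndex;
    poolInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    poolInfo.blockSize = 64ull * 1024 * 1024;
    poolInfo.maxBlockCount = 1;
    VmaPool pool;
    VkResult res = vmaCreatePool(allocator, &poolInfo, &pool);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT; // Allocate from the top.

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = 1024;
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    VkBuffer buf;
    VmaAllocation alloc;
    res = vmaCreateBuffer(allocator, &bufInfo, &allocCreateInfo, &buf, &alloc, VMA_NULL);
*/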
bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
        return false;
    }

    // Try to allocate before 2nd.back(), or end of block if 2nd.empty().
    if(allocSize > size)
    {
        return false;
    }
    VkDeviceSize resultBaseOffset = size - allocSize;
    if(!suballocations2nd.empty())
    {
        const VmaSuballocation& lastSuballoc = suballocations2nd.back();
        resultBaseOffset = lastSuballoc.offset - allocSize;
        if(allocSize > lastSuballoc.offset)
        {
            return false;
        }
    }

    // Start from offset equal to end of free space.
    VkDeviceSize resultOffset = resultBaseOffset;

    // Apply VMA_DEBUG_MARGIN at the end.
    if(VMA_DEBUG_MARGIN > 0)
    {
        if(resultOffset < VMA_DEBUG_MARGIN)
        {
            return false;
        }
        resultOffset -= VMA_DEBUG_MARGIN;
    }

    // Apply alignment.
    resultOffset = VmaAlignDown(resultOffset, allocAlignment);

    // Check next suballocations from 2nd for BufferImageGranularity conflicts.
    // Increase alignment if necessary.
    if(bufferImageGranularity > 1 && !suballocations2nd.empty())
    {
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
        {
            const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
            if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
            {
                if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
                {
                    bufferImageGranularityConflict = true;
                    break;
                }
            }
            else
            {
                // Already on previous page.
                break;
            }
        }
        if(bufferImageGranularityConflict)
        {
            resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
        }
    }

    // There is enough free space.
    const VkDeviceSize endOf1st = !suballocations1st.empty() ?
        suballocations1st.back().offset + suballocations1st.back().size :
        0;
    if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
    {
        // Check previous suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
            }
        }

        // All tests passed: Success.
        pAllocationRequest->offset = resultOffset;
        pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
        pAllocationRequest->sumItemSize = 0;
        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
        return true;
    }

    return false;
}
bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        // Try to allocate at the end of 1st vector.

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations1st.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
            {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
                {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on previous page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }

    // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
    // beginning of 1st vector as the end of free space.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost)
        {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
            {
                // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    // No problem.
                }
                else
                {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else
                    {
                        return false;
                    }
                }
                ++index1st;
            }

            // Check next suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, we must mark more allocations lost or fail.
            if(bufferImageGranularity > 1)
            {
                while(index1st < suballocations1st.size())
                {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
                    {
                        if(suballoc.hAllocation != VK_NULL_HANDLE)
                        {
                            // Not checking for VmaIsBufferImageGranularityConflict to be safe.
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                            {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else
                            {
                                return false;
                            }
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                    ++index1st;
                }
            }

            // Special case: there is not enough room at the end for this allocation, even after making all from the 1st lost.
            if(index1st == suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
            {
                VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
                return false;
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1)
            {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++)
                {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
            return true;
        }
    }

    return false;
}
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0)
    {
        return true;
    }

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    // We always start from 1st.
    SuballocationVectorType* suballocations = &AccessSuballocations1st();
    size_t index = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
    {
        if(index == suballocations->size())
        {
            index = 0;
            // If we get to the end of 1st, we wrap around to beginning of 2nd.
            if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                suballocations = &AccessSuballocations2nd();
            }
            // else: m_2ndVectorMode == SECOND_VECTOR_EMPTY:
            // suballocations continues pointing at AccessSuballocations1st().
            VMA_ASSERT(!suballocations->empty());
        }
        VmaSuballocation& suballoc = (*suballocations)[index];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                if(suballocations == &AccessSuballocations1st())
                {
                    ++m_1stNullItemsMiddleCount;
                }
                else
                {
                    ++m_2ndNullItemsCount;
                }
                ++madeLostCount;
            }
            else
            {
                return false;
            }
        }
        ++index;
    }

    CleanupAfterFree();

    return true;
}
uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount)
    {
        CleanupAfterFree();
    }

    return lostAllocationCount;
}
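/*
Editor's note: a hedged sketch of the "lost allocations" feature these two
functions implement, using the library's public API. An allocation created with
VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT may be reclaimed after it has not been
used for more than frameInUseCount frames; the application must check for this
each frame. Variable names are illustrative.

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;

    // Each frame, before using the resource:
    vmaSetCurrentFrameIndex(allocator, frameIndex);
    if(vmaTouchAllocation(allocator, alloc) == VK_FALSE)
    {
        // Allocation was lost: destroy and recreate the resource.
    }
*/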
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type)
    {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();

            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());

            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of 2-part ring buffer, so it lands before the first allocation from 1st vector.
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode)
            {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}
void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty())
    {
        // First allocation: mark it as next empty at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset)
        {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
    {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stay uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaBinaryFindSorted(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc,
            VmaSuballocationOffsetLess());
        if(it != suballocations1st.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
    {
        // Item from the middle of 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stay uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
        if(it != suballocations2nd.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
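/*
Editor's note on the heuristic above: "nullItemCount * 2 >= (suballocCount -
nullItemCount) * 3" triggers compaction once null items reach at least 1.5x the
live items, i.e. once more than 60% of the 1st vector is dead entries, and only
when the vector exceeds 32 elements, so tiny vectors are never compacted. For
example, with suballocCount = 100 and nullItemCount = 60: 60 * 2 = 120 and
(100 - 60) * 3 = 120, so 120 >= 120 holds and compaction runs.
*/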
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty())
    {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else
    {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
        {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            VmaVectorRemove(suballocations2nd, 0);
        }

        if(ShouldCompact1st())
        {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
            {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
                {
                    ++srcIndex;
                }
                if(dstIndex != srcIndex)
                {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty())
        {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
        {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
                {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}
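/*
Editor's summary of the buddy metadata below (standard buddy-allocator behavior,
restated for orientation): the usable block size is rounded down to a power of 2
(m_UsableSize = VmaPrevPow2(size)) and viewed as a binary tree. A node at level
L has size m_UsableSize >> L; level 0 is the whole block. Allocation finds the
shallowest free node with a compatible offset, splitting nodes into two "buddy"
children on the way down to the target level; freeing walks back up, merging a
node with its buddy whenever both become free. Internal fragmentation is the
price: every request is served by a node of the next power-of-2 size.
*/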
VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}

VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}

void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
    {
        ++m_LevelCount;
    }

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}
bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next)
        {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL)
            {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else
            {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists at unused levels are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}
VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        if(m_FreeList[level].front != VMA_NULL)
        {
            return LevelToNodeSize(level);
        }
    }
    return 0;
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    outInfo.blockCount = 1;
    outInfo.allocationCount = 0;
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.unusedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0)
    {
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
    }
}
void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0)
    {
        ++inoutStats.unusedRangeCount;
        // Not updating unusedRangeSizeMax with unusableSize because this space is not available for allocations.
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity. May be optimized some day.
    // Whenever this allocation might be an OPTIMAL image, pad it to the granularity.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
    {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    // Lost allocations are not supported in the buddy allocator.
    return pAllocationRequest->itemsToMakeLostCount == 0;
}

uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    // Lost allocations are not supported in the buddy allocator.
    return 0;
}
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down, splitting free nodes.
    while(currLevel < targetLevel)
    {
        // currNode is already the first free node at currLevel.
        // Remove it from the list of free nodes at this level.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;

        // currNode, as left child of the node just split, fulfills the alignment
        // requirement as well.
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }

    vma_delete(GetAllocationCallbacks(), node);
}
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev and next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
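/*
Editor's worked example for AllocSizeToLevel(): with m_UsableSize = 64 MiB and
allocSize = 5 MiB, the loop descends while the request still fits in the next,
smaller node size: 32 MiB holds 5 MiB, 16 MiB holds it, 8 MiB holds it, but
4 MiB does not, so the loop stops at level 3 (node size 64 MiB >> 3 = 8 MiB).
The 5 MiB request is therefore served by an 8 MiB node, wasting 3 MiB to
internal fragmentation.
*/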
11691 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
11694 Node* node = m_Root;
11695 VkDeviceSize nodeOffset = 0;
11696 uint32_t level = 0;
11697 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
11698 while(node->type == Node::TYPE_SPLIT)
11700 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
11701 if(offset < nodeOffset + nextLevelSize)
11703 node = node->split.leftChild;
11707 node = node->split.leftChild->buddy;
11708 nodeOffset += nextLevelSize;
11711 levelNodeSize = nextLevelSize;
11714 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
11715 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
11718 --m_AllocationCount;
11719 m_SumFreeSize += alloc->GetSize();
11721 node->type = Node::TYPE_FREE;
11724 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
11726 RemoveFromFreeList(level, node->buddy);
11727 Node* const parent = node->parent;
11729 vma_delete(GetAllocationCallbacks(), node->buddy);
11730 vma_delete(GetAllocationCallbacks(), node);
11731 parent->type = Node::TYPE_FREE;
11739 AddToFreeListFront(level, node);
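// Explanatory note: FreeAtOffset first walks from the root down to the leaf
// covering the offset (the TYPE_SPLIT loop above), marks it free, and then
// merges upward: while the node's buddy is also free, both children are
// destroyed and their parent becomes a single free node one level up. For
// example, freeing a 32 MiB node whose buddy is already free yields one free
// 64 MiB node, which may cascade further (sizes illustrative).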
11742 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
11746 case Node::TYPE_FREE:
11752 case Node::TYPE_ALLOCATION:
11754 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11760 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11761 if(unusedRangeSize > 0)
11770 case Node::TYPE_SPLIT:
11772 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11773 const Node* const leftChild = node->split.leftChild;
11774 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11775 const Node* const rightChild = leftChild->buddy;
11776 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
11776 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
11784 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11786 VMA_ASSERT(node->type == Node::TYPE_FREE);
11789 Node* const frontNode = m_FreeList[level].front;
11790 if(frontNode == VMA_NULL)
11792 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11793 node->free.prev = node->free.next = VMA_NULL;
11794 m_FreeList[level].front = m_FreeList[level].back = node;
11798 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11799 node->free.prev = VMA_NULL;
11800 node->free.next = frontNode;
11801 frontNode->free.prev = node;
11802 m_FreeList[level].front = node;
11806 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11808 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
11811 if(node->free.prev == VMA_NULL)
11813 VMA_ASSERT(m_FreeList[level].front == node);
11814 m_FreeList[level].front = node->free.next;
11818 Node* const prevFreeNode = node->free.prev;
11819 VMA_ASSERT(prevFreeNode->free.next == node);
11820 prevFreeNode->free.next = node->free.next;
11824 if(node->free.next == VMA_NULL)
11826 VMA_ASSERT(m_FreeList[level].back == node);
11827 m_FreeList[level].back = node->free.prev;
11831 Node* const nextFreeNode = node->free.next;
11832 VMA_ASSERT(nextFreeNode->free.prev == node);
11833 nextFreeNode->free.prev = node->free.prev;
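// Each level's free nodes form an intrusive doubly-linked list
// (m_FreeList[level].front/back), so AddToFreeListFront and RemoveFromFreeList
// above run in O(1): only the neighbours' prev/next pointers and the list
// front/back are patched; no traversal is required.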
11837 #if VMA_STATS_STRING_ENABLED
11838 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
11842 case Node::TYPE_FREE:
11843 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11845 case Node::TYPE_ALLOCATION:
11847 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11848 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11849 if(allocSize < levelNodeSize)
11851 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11855 case Node::TYPE_SPLIT:
11857 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11858 const Node* const leftChild = node->split.leftChild;
11859 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11860 const Node* const rightChild = leftChild->buddy;
11861 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
11868 #endif // #if VMA_STATS_STRING_ENABLED
11874 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
11875 m_pMetadata(VMA_NULL),
11876 m_MemoryTypeIndex(UINT32_MAX),
11878 m_hMemory(VK_NULL_HANDLE),
11880 m_pMappedData(VMA_NULL)
11884 void VmaDeviceMemoryBlock::Init(
11887 uint32_t newMemoryTypeIndex,
11888 VkDeviceMemory newMemory,
11889 VkDeviceSize newSize,
11891 uint32_t algorithm)
11893 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11895 m_hParentPool = hParentPool;
11896 m_MemoryTypeIndex = newMemoryTypeIndex;
11898 m_hMemory = newMemory;
11903 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11906 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11912 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11914 m_pMetadata->Init(newSize);
11917 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
11921 VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
11923 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11924 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11925 m_hMemory = VK_NULL_HANDLE;
11927 vma_delete(allocator, m_pMetadata);
11928 m_pMetadata = VMA_NULL;
11931 bool VmaDeviceMemoryBlock::Validate() const
11933 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11934 (m_pMetadata->GetSize() != 0));
11936 return m_pMetadata->Validate();
11939 VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
11941 void* pData = nullptr;
11942 VkResult res = Map(hAllocator, 1, &pData);
11943 if(res != VK_SUCCESS)
11948 res = m_pMetadata->CheckCorruption(pData);
11950 Unmap(hAllocator, 1);
11955 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
11962 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11963 if(m_MapCount != 0)
11965 m_MapCount += count;
11966 VMA_ASSERT(m_pMappedData != VMA_NULL);
11967 if(ppData != VMA_NULL)
11969 *ppData = m_pMappedData;
11975 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11976 hAllocator->m_hDevice,
11982 if(result == VK_SUCCESS)
11984 if(ppData != VMA_NULL)
11986 *ppData = m_pMappedData;
11988 m_MapCount = count;
11994 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
12001 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12002 if(m_MapCount >= count)
12004 m_MapCount -= count;
12005 if(m_MapCount == 0)
12007 m_pMappedData = VMA_NULL;
12008 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
12013 VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
12017 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12019 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12020 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12023 VkResult res = Map(hAllocator, 1, &pData);
12024 if(res != VK_SUCCESS)
12029 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
12030 VmaWriteMagicValue(pData, allocOffset + allocSize);
12032 Unmap(hAllocator, 1);
12037 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
12039 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
12040 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
12043 VkResult res = Map(hAllocator, 1, &pData);
12044 if(res != VK_SUCCESS)
12049 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
12051 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
12053 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
12055 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
12058 Unmap(hAllocator, 1);
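// With VMA_DEBUG_DETECT_CORRUPTION enabled, each allocation is padded by
// VMA_DEBUG_MARGIN bytes of magic values on both sides (layout illustrative):
//
//     [ ... | magic | allocation data | magic | ... ]
//             ^allocOffset-MARGIN      ^allocOffset+allocSize
//
// WriteMagicValueAroundAllocation stamps both guard regions at allocation
// time; ValidateMagicValueAroundAllocation re-checks them on free, so a
// buffer overrun in either direction trips the asserts above.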
12063 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
12066 VkDeviceSize allocationLocalOffset,
12070 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12071 hAllocation->GetBlock() == this);
12072 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12073 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12074 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12076 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12077 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
12080 VkResult VmaDeviceMemoryBlock::BindImageMemory(
12083 VkDeviceSize allocationLocalOffset,
12087 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
12088 hAllocation->GetBlock() == this);
12089 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
12090 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
12091 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
12093 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
12094 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
12099 memset(&outInfo, 0, sizeof(outInfo));
12118 static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
12126 VmaPool_T::VmaPool_T(
12129 VkDeviceSize preferredBlockSize) :
12133 createInfo.memoryTypeIndex,
12134 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
12135 createInfo.minBlockCount,
12136 createInfo.maxBlockCount,
12138 createInfo.frameInUseCount,
12139 createInfo.blockSize != 0,
12146 VmaPool_T::~VmaPool_T()
12150 void VmaPool_T::SetName(const char* pName)
12152 const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
12153 VmaFreeString(allocs, m_Name);
12155 if(pName != VMA_NULL)
12157 m_Name = VmaCreateStringCopy(allocs, pName);
12165 #if VMA_STATS_STRING_ENABLED
12167 #endif // #if VMA_STATS_STRING_ENABLED
12169 VmaBlockVector::VmaBlockVector(
12172 uint32_t memoryTypeIndex,
12173 VkDeviceSize preferredBlockSize,
12174 size_t minBlockCount,
12175 size_t maxBlockCount,
12176 VkDeviceSize bufferImageGranularity,
12177 uint32_t frameInUseCount,
12178 bool explicitBlockSize,
12179 uint32_t algorithm) :
12180 m_hAllocator(hAllocator),
12181 m_hParentPool(hParentPool),
12182 m_MemoryTypeIndex(memoryTypeIndex),
12183 m_PreferredBlockSize(preferredBlockSize),
12184 m_MinBlockCount(minBlockCount),
12185 m_MaxBlockCount(maxBlockCount),
12186 m_BufferImageGranularity(bufferImageGranularity),
12187 m_FrameInUseCount(frameInUseCount),
12188 m_ExplicitBlockSize(explicitBlockSize),
12189 m_Algorithm(algorithm),
12190 m_HasEmptyBlock(false),
12191 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
12196 VmaBlockVector::~VmaBlockVector()
12198 for(size_t i = m_Blocks.size(); i--; )
12200 m_Blocks[i]->Destroy(m_hAllocator);
12201 vma_delete(m_hAllocator, m_Blocks[i]);
12205 VkResult VmaBlockVector::CreateMinBlocks()
12207 for(size_t i = 0; i < m_MinBlockCount; ++i)
12209 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
12210 if(res != VK_SUCCESS)
12218 void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
12220 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12222 const size_t blockCount = m_Blocks.size();
12231 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12233 const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
12234 VMA_ASSERT(pBlock);
12235 VMA_HEAVY_ASSERT(pBlock->Validate());
12236 pBlock->m_pMetadata->AddPoolStats(*pStats);
12240 bool VmaBlockVector::IsEmpty()
12242 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12243 return m_Blocks.empty();
12246 bool VmaBlockVector::IsCorruptionDetectionEnabled() const
12248 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12249 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
12250 (VMA_DEBUG_MARGIN > 0) &&
12252 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
12255 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
12257 VkResult VmaBlockVector::Allocate(
12258 uint32_t currentFrameIndex,
12260 VkDeviceSize alignment,
12262 VmaSuballocationType suballocType,
12263 size_t allocationCount,
12267 VkResult res = VK_SUCCESS;
12269 if(IsCorruptionDetectionEnabled())
12271 size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
12272 alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
12276 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12277 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12279 res = AllocatePage(
12285 pAllocations + allocIndex);
12286 if(res != VK_SUCCESS)
12293 if(res != VK_SUCCESS)
12296 while(allocIndex--)
12298 Free(pAllocations[allocIndex]);
12300 memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
12306 VkResult VmaBlockVector::AllocatePage(
12307 uint32_t currentFrameIndex,
12309 VkDeviceSize alignment,
12311 VmaSuballocationType suballocType,
12319 VkDeviceSize freeMemory;
12321 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
12323 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
12327 const bool canFallbackToDedicated = !IsCustomPool();
12328 const bool canCreateNewBlock =
12330 (m_Blocks.size() < m_MaxBlockCount) &&
12331 (freeMemory >= size || !canFallbackToDedicated);
12338 canMakeOtherLost = false;
12342 if(isUpperAddress &&
12345 return VK_ERROR_FEATURE_NOT_PRESENT;
12359 return VK_ERROR_FEATURE_NOT_PRESENT;
12363 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
12365 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12373 if(!canMakeOtherLost || canCreateNewBlock)
12382 if(!m_Blocks.empty())
12384 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
12385 VMA_ASSERT(pCurrBlock);
12386 VkResult res = AllocateFromBlock(
12396 if(res == VK_SUCCESS)
12398 VMA_DEBUG_LOG(" Returned from last block #%u", pCurrBlock->GetId());
12408 for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
12410 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
12411 VMA_ASSERT(pCurrBlock);
12412 VkResult res = AllocateFromBlock(
12422 if(res == VK_SUCCESS)
12424 VMA_DEBUG_LOG(" Returned from existing block #%u", pCurrBlock->GetId());
12432 for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
12434 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
12435 VMA_ASSERT(pCurrBlock);
12436 VkResult res = AllocateFromBlock(
12446 if(res == VK_SUCCESS)
12448 VMA_DEBUG_LOG(" Returned from existing block #%u", pCurrBlock->GetId());
12456 if(canCreateNewBlock)
12459 VkDeviceSize newBlockSize = m_PreferredBlockSize;
12460 uint32_t newBlockSizeShift = 0;
12461 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
12463 if(!m_ExplicitBlockSize)
12466 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
12467 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
12469 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
12470 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
12472 newBlockSize = smallerNewBlockSize;
12473 ++newBlockSizeShift;
12482 size_t newBlockIndex = 0;
12483 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
12484 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
12486 if(!m_ExplicitBlockSize)
12488 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
12490 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
12491 if(smallerNewBlockSize >= size)
12493 newBlockSize = smallerNewBlockSize;
12494 ++newBlockSizeShift;
12495 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
12496 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
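// Block size heuristic (explanatory note): for pools without an explicit
// block size, creation starts at m_PreferredBlockSize and, if vkAllocateMemory
// fails, the size is halved up to NEW_BLOCK_SIZE_SHIFT_MAX (3) times while the
// request still fits. E.g. a 256 MiB preferred size falls back through
// 256 -> 128 -> 64 -> 32 MiB before giving up (sizes illustrative).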
12505 if(res == VK_SUCCESS)
12507 VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
12508 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
12510 res = AllocateFromBlock(
12520 if(res == VK_SUCCESS)
12522 VMA_DEBUG_LOG(" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
12528 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12535 if(canMakeOtherLost)
12537 uint32_t tryIndex = 0;
12538 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
12540 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
12541 VmaAllocationRequest bestRequest = {};
12542 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
12548 for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
12550 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
12551 VMA_ASSERT(pCurrBlock);
12552 VmaAllocationRequest currRequest = {};
12553 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
12556 m_BufferImageGranularity,
12565 const VkDeviceSize currRequestCost = currRequest.CalcCost();
12566 if(pBestRequestBlock == VMA_NULL ||
12567 currRequestCost < bestRequestCost)
12569 pBestRequestBlock = pCurrBlock;
12570 bestRequest = currRequest;
12571 bestRequestCost = currRequestCost;
12573 if(bestRequestCost == 0)
12584 for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
12586 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
12587 VMA_ASSERT(pCurrBlock);
12588 VmaAllocationRequest currRequest = {};
12589 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
12592 m_BufferImageGranularity,
12601 const VkDeviceSize currRequestCost = currRequest.CalcCost();
12602 if(pBestRequestBlock == VMA_NULL ||
12603 currRequestCost < bestRequestCost ||
12606 pBestRequestBlock = pCurrBlock;
12607 bestRequest = currRequest;
12608 bestRequestCost = currRequestCost;
12610 if(bestRequestCost == 0 ||
12620 if(pBestRequestBlock != VMA_NULL)
12624 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
12625 if(res != VK_SUCCESS)
12631 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
12637 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
12638 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
12639 UpdateHasEmptyBlock();
12640 (*pAllocation)->InitBlockAllocation(
12642 bestRequest.offset,
12649 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
12650 VMA_DEBUG_LOG(" Returned from existing block");
12651 (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
12652 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
12653 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12655 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12657 if(IsCorruptionDetectionEnabled())
12659 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
12660 VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
12675 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
12677 return VK_ERROR_TOO_MANY_OBJECTS;
12681 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
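// Summary of the AllocatePage strategy above: (1) try the last block, which
// tends to have the most free space, (2) scan existing blocks forward or
// backward depending on the allocation strategy, (3) create a new block if
// the block count and heap budget allow, and (4) only then, with
// canMakeOtherLost, repeatedly evict lost-able allocations, up to
// VMA_ALLOCATION_TRY_COUNT attempts before VK_ERROR_TOO_MANY_OBJECTS.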
12684 void VmaBlockVector::Free(
12687 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
12689 bool budgetExceeded = false;
12691 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
12693 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
12694 budgetExceeded = heapBudget.usage >= heapBudget.budget;
12699 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12701 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12703 if(IsCorruptionDetectionEnabled())
12705 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
12706 VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
12709 if(hAllocation->IsPersistentMap())
12711 pBlock->Unmap(m_hAllocator, 1);
12714 pBlock->m_pMetadata->Free(hAllocation);
12715 VMA_HEAVY_ASSERT(pBlock->Validate());
12717 VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
12719 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
12721 if(pBlock->m_pMetadata->IsEmpty())
12724 if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
12726 pBlockToDelete = pBlock;
12733 else if(m_HasEmptyBlock && canDeleteBlock)
12735 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
12736 if(pLastBlock->m_pMetadata->IsEmpty())
12738 pBlockToDelete = pLastBlock;
12739 m_Blocks.pop_back();
12743 UpdateHasEmptyBlock();
12744 IncrementallySortBlocks();
12749 if(pBlockToDelete != VMA_NULL)
12751 VMA_DEBUG_LOG(" Deleted empty block");
12752 pBlockToDelete->Destroy(m_hAllocator);
12753 vma_delete(m_hAllocator, pBlockToDelete);
12757 VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
12759 VkDeviceSize result = 0;
12760 for(size_t i = m_Blocks.size(); i--; )
12762 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
12763 if(result >= m_PreferredBlockSize)
12771 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
12773 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12775 if(m_Blocks[blockIndex] == pBlock)
12777 VmaVectorRemove(m_Blocks, blockIndex);
12784 void VmaBlockVector::IncrementallySortBlocks()
12789 for(size_t i = 1; i < m_Blocks.size(); ++i)
12791 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
12793 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
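// IncrementallySortBlocks performs one bubble-sort pass per call, so the
// vector converges toward ascending order of free size across many
// allocate/free operations instead of paying for a full sort each time.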
12800 VkResult VmaBlockVector::AllocateFromBlock(
12801 VmaDeviceMemoryBlock* pBlock,
12802 uint32_t currentFrameIndex,
12804 VkDeviceSize alignment,
12807 VmaSuballocationType suballocType,
12816 VmaAllocationRequest currRequest = {};
12817 if(pBlock->m_pMetadata->CreateAllocationRequest(
12820 m_BufferImageGranularity,
12830 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
12834 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12835 if(res != VK_SUCCESS)
12841 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
12842 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
12843 UpdateHasEmptyBlock();
12844 (*pAllocation)->InitBlockAllocation(
12846 currRequest.offset,
12853 VMA_HEAVY_ASSERT(pBlock->Validate());
12854 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12855 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
12856 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12858 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12860 if(IsCorruptionDetectionEnabled())
12862 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
12863 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
12867 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12870 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
12872 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12873 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12874 allocInfo.allocationSize = blockSize;
12876 #if VMA_BUFFER_DEVICE_ADDRESS
12878 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
12879 if(m_hAllocator->m_UseKhrBufferDeviceAddress)
12881 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
12882 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
12884 #endif // #if VMA_BUFFER_DEVICE_ADDRESS
12886 VkDeviceMemory mem = VK_NULL_HANDLE;
12887 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
12896 VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12902 allocInfo.allocationSize,
12906 m_Blocks.push_back(pBlock);
12907 if(pNewBlockIndex != VMA_NULL)
12909 *pNewBlockIndex = m_Blocks.size() - 1;
12915 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12916 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12917 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12919 const size_t blockCount = m_Blocks.size();
12920 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
12924 BLOCK_FLAG_USED = 0x00000001,
12925 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
12933 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12934 blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12935 memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));
12938 const size_t moveCount = moves.size();
12939 for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12941 const VmaDefragmentationMove& move = moves[moveIndex];
12942 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12943 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12946 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
12949 for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12951 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12952 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12953 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12955 currBlockInfo.pMappedData = pBlock->GetMappedData();
12957 if(currBlockInfo.pMappedData == VMA_NULL)
12959 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12960 if(pDefragCtx->res == VK_SUCCESS)
12962 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
12969 if(pDefragCtx->res == VK_SUCCESS)
12971 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12972 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12974 for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12976 const VmaDefragmentationMove& move = moves[moveIndex];
12978 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12979 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12981 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
12986 VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
12987 memRange.memory = pSrcBlock->GetDeviceMemory();
12988 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12989 memRange.size = VMA_MIN(
12990 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12991 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12992 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12997 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12998 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12999 static_cast<size_t>(move.size));
13001 if(IsCorruptionDetectionEnabled())
13003 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
13004 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
13010 VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
13011 memRange.memory = pDstBlock->GetDeviceMemory();
13012 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
13013 memRange.size = VMA_MIN(
13014 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
13015 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
13016 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
13023 for(size_t blockIndex = blockCount; blockIndex--; )
13025 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
13026 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
13028 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13029 pBlock->Unmap(m_hAllocator, 1);
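// CPU defragmentation above is a map/invalidate/copy/flush sequence: every
// block touched by a move is mapped (or its existing persistent mapping is
// reused), source ranges are invalidated and destination ranges flushed for
// non-coherent memory, and the data itself is copied between mapped pointers.
// Note the VkMappedMemoryRange offset/size are aligned to nonCoherentAtomSize
// and clamped to the block size, as the Vulkan spec requires.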
13034 void VmaBlockVector::ApplyDefragmentationMovesGpu(
13035 class VmaBlockVectorDefragmentationContext* pDefragCtx,
13036 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13037 VkCommandBuffer commandBuffer)
13039 const size_t blockCount = m_Blocks.size();
13041 pDefragCtx->blockContexts.resize(blockCount);
13042 memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));
13045 const size_t moveCount = moves.size();
13046 for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13048 const VmaDefragmentationMove& move = moves[moveIndex];
13053 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13054 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
13058 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
13062 VkBufferCreateInfo bufCreateInfo;
13063 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
13065 for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
13067 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
13068 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13069 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
13071 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
13072 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
13073 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
13074 if(pDefragCtx->res == VK_SUCCESS)
13076 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
13077 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
13084 if(pDefragCtx->res == VK_SUCCESS)
13086 for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
13088 const VmaDefragmentationMove& move = moves[moveIndex];
13090 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
13091 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
13093 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
13095 VkBufferCopy region = {
13099 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
13100 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
13105 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
13107 pDefragCtx->res = VK_NOT_READY;
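// GPU defragmentation works without host mapping: a temporary VkBuffer is
// created and bound at offset 0 to each participating memory block, each move
// is recorded as a vkCmdCopyBuffer region into the caller's command buffer,
// and the result is set to VK_NOT_READY to signal that the commands still
// have to be submitted and completed; the temporary buffers are destroyed
// later in DefragmentationEnd.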
13113 for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
13115 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
13116 if(pBlock->m_pMetadata->IsEmpty())
13118 if(m_Blocks.size() > m_MinBlockCount)
13120 if(pDefragmentationStats != VMA_NULL)
13123 pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
13126 VmaVectorRemove(m_Blocks, blockIndex);
13127 pBlock->Destroy(m_hAllocator);
13128 vma_delete(m_hAllocator, pBlock);
13136 UpdateHasEmptyBlock();
13139 void VmaBlockVector::UpdateHasEmptyBlock()
13141 m_HasEmptyBlock = false;
13142 for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
13144 VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
13145 if(pBlock->m_pMetadata->IsEmpty())
13147 m_HasEmptyBlock = true;
13153 #if VMA_STATS_STRING_ENABLED
13155 void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
13157 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13159 json.BeginObject();
13163 const char* poolName = m_hParentPool->GetName();
13164 if(poolName != VMA_NULL && poolName[0] != '\0')
13166 json.WriteString("Name");
13167 json.WriteString(poolName);
13170 json.WriteString("MemoryTypeIndex");
13171 json.WriteNumber(m_MemoryTypeIndex);
13173 json.WriteString("BlockSize");
13174 json.WriteNumber(m_PreferredBlockSize);
13176 json.WriteString("BlockCount");
13177 json.BeginObject(true);
13178 if(m_MinBlockCount > 0)
13180 json.WriteString("Min");
13181 json.WriteNumber((uint64_t)m_MinBlockCount);
13183 if(m_MaxBlockCount < SIZE_MAX)
13185 json.WriteString("Max");
13186 json.WriteNumber((uint64_t)m_MaxBlockCount);
13188 json.WriteString("Cur");
13189 json.WriteNumber((uint64_t)m_Blocks.size());
13192 if(m_FrameInUseCount > 0)
13194 json.WriteString("FrameInUseCount");
13195 json.WriteNumber(m_FrameInUseCount);
13198 if(m_Algorithm != 0)
13200 json.WriteString("Algorithm");
13201 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
13206 json.WriteString("PreferredBlockSize");
13207 json.WriteNumber(m_PreferredBlockSize);
13210 json.WriteString("Blocks");
13211 json.BeginObject();
13212 for(size_t i = 0; i < m_Blocks.size(); ++i)
13214 json.BeginString();
13215 json.ContinueString(m_Blocks[i]->GetId());
13218 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
13225 #endif // #if VMA_STATS_STRING_ENABLED
13227 void VmaBlockVector::Defragment(
13228 class VmaBlockVectorDefragmentationContext* pCtx,
13230 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
13231 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
13232 VkCommandBuffer commandBuffer)
13234 pCtx->res = VK_SUCCESS;
13236 const VkMemoryPropertyFlags memPropFlags =
13237 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
13238 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
13240 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
13242 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
13243 !IsCorruptionDetectionEnabled() &&
13244 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
13247 if(canDefragmentOnCpu || canDefragmentOnGpu)
13249 bool defragmentOnGpu;
13251 if(canDefragmentOnGpu != canDefragmentOnCpu)
13253 defragmentOnGpu = canDefragmentOnGpu;
13258 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
13259 m_hAllocator->IsIntegratedGpu();
13262 bool overlappingMoveSupported = !defragmentOnGpu;
13264 if(m_hAllocator->m_UseMutex)
13268 if(!m_Mutex.TryLockWrite())
13270 pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
13276 m_Mutex.LockWrite();
13277 pCtx->mutexLocked = true;
13281 pCtx->Begin(overlappingMoveSupported, flags);
13285 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
13286 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
13287 pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);
13290 if(pStats != VMA_NULL)
13292 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
13293 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
13296 VMA_ASSERT(bytesMoved <= maxBytesToMove);
13297 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
13298 if(defragmentOnGpu)
13300 maxGpuBytesToMove -= bytesMoved;
13301 maxGpuAllocationsToMove -= allocationsMoved;
13305 maxCpuBytesToMove -= bytesMoved;
13306 maxCpuAllocationsToMove -= allocationsMoved;
13312 if(m_hAllocator->m_UseMutex)
13313 m_Mutex.UnlockWrite();
13315 if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
13316 pCtx->res = VK_NOT_READY;
13321 if(pCtx->res >= VK_SUCCESS)
13323 if(defragmentOnGpu)
13325 ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
13329 ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
13335 void VmaBlockVector::DefragmentationEnd(
13336 class VmaBlockVectorDefragmentationContext* pCtx,
13342 VMA_ASSERT(pCtx->mutexLocked == false);
13346 m_Mutex.LockWrite();
13347 pCtx->mutexLocked = true;
13351 if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
13354 for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
13356 VmaBlockDefragmentationContext &blockCtx = pCtx->blockContexts[blockIndex];
13357 if(blockCtx.hBuffer)
13359 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
13363 if(pCtx->res >= VK_SUCCESS)
13365 FreeEmptyBlocks(pStats);
13369 if(pCtx->mutexLocked)
13371 VMA_ASSERT(m_hAllocator->m_UseMutex);
13372 m_Mutex.UnlockWrite();
13376 uint32_t VmaBlockVector::ProcessDefragmentations(
13377 class VmaBlockVectorDefragmentationContext *pCtx,
13380 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13382 const uint32_t moveCount = std::min(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);
13384 for(uint32_t i = 0; i < moveCount; ++ i)
13386 VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];
13389 pMove->memory = move.pDstBlock->GetDeviceMemory();
13390 pMove->offset = move.dstOffset;
13395 pCtx->defragmentationMovesProcessed += moveCount;
13400 void VmaBlockVector::CommitDefragmentations(
13401 class VmaBlockVectorDefragmentationContext *pCtx,
13404 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13406 for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
13408 const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];
13410 move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
13411 move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
13414 pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
13415 FreeEmptyBlocks(pStats);
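// ProcessDefragmentations/CommitDefragmentations back the incremental
// defragmentation passes: each pass exposes at most maxMoves pending moves
// (filling pMove->memory and pMove->offset so the caller can rebind its
// buffers/images), and the commit step frees the source offsets and switches
// the allocations to their destination blocks. A caller-side sketch, assuming
// this version's pass API (vmaBeginDefragmentationPass and friends):
//
//     VmaDefragmentationPassMoveInfo moves[64];
//     VmaDefragmentationPassInfo passInfo = {};
//     passInfo.moveCount = 64;    // capacity of `moves` (illustrative)
//     passInfo.pMoves = moves;
//     vmaBeginDefragmentationPass(allocator, defragCtx, &passInfo);
//     // ... recreate/rebind resources at moves[i].memory + moves[i].offset ...
//     vmaEndDefragmentationPass(allocator, defragCtx);  // commits the moves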
13418 size_t VmaBlockVector::CalcAllocationCount() const
13421 for(size_t i = 0; i < m_Blocks.size(); ++i)
13423 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
13428 bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
13430 if(m_BufferImageGranularity == 1)
13434 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
13435 for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
13437 VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
13438 VMA_ASSERT(m_Algorithm == 0);
13439 VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
13440 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
13448 void VmaBlockVector::MakePoolAllocationsLost(
13449 uint32_t currentFrameIndex,
13450 size_t* pLostAllocationCount)
13452 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
13453 size_t lostAllocationCount = 0;
13454 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13456 VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
13457 VMA_ASSERT(pBlock);
13458 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
13460 if(pLostAllocationCount != VMA_NULL)
13462 *pLostAllocationCount = lostAllocationCount;
13466 VkResult VmaBlockVector::CheckCorruption()
13468 if(!IsCorruptionDetectionEnabled())
13470 return VK_ERROR_FEATURE_NOT_PRESENT;
13473 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13474 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13476 VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
13477 VMA_ASSERT(pBlock);
13478 VkResult res = pBlock->CheckCorruption(m_hAllocator);
13479 if(res != VK_SUCCESS)
13487 void VmaBlockVector::AddStats(VmaStats* pStats)
13489 const uint32_t memTypeIndex = m_MemoryTypeIndex;
13490 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
13492 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
13494 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13496 const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
13497 VMA_ASSERT(pBlock);
13498 VMA_HEAVY_ASSERT(pBlock->Validate());
13500 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
13501 VmaAddStatInfo(pStats->total, allocationStatInfo);
13502 VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
13503 VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
13510 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
13512 VmaBlockVector* pBlockVector,
13513 uint32_t currentFrameIndex,
13514 bool overlappingMoveSupported) :
13515 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
13516 m_AllocationCount(0),
13517 m_AllAllocations(false),
13519 m_AllocationsMoved(0),
13520 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
13523 const size_t blockCount = m_pBlockVector->m_Blocks.size();
13524 for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13526 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
13527 pBlockInfo->m_OriginalBlockIndex = blockIndex;
13528 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
13529 m_Blocks.push_back(pBlockInfo);
13533 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
13536 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
13538 for(size_t i = m_Blocks.size(); i--; )
13540 vma_delete(m_hAllocator, m_Blocks[i]);
13544 void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
13547 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
13549 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
13550 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
13551 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
13553 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
13554 (*it)->m_Allocations.push_back(allocInfo);
13561 ++m_AllocationCount;
13565 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
13566 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13567 VkDeviceSize maxBytesToMove,
13568 uint32_t maxAllocationsToMove,
13569 bool freeOldAllocations)
13571 if(m_Blocks.empty())
13584 size_t srcBlockMinIndex = 0;
13597 size_t srcBlockIndex = m_Blocks.size() - 1;
13598 size_t srcAllocIndex = SIZE_MAX;
13604 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
13606 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
13609 if(srcBlockIndex == srcBlockMinIndex)
13616 srcAllocIndex = SIZE_MAX;
13621 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
13625 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
13626 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
13628 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
13629 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
13630 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
13631 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
13634 for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
13636 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
13637 VmaAllocationRequest dstAllocRequest;
13638 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
13639 m_CurrentFrameIndex,
13640 m_pBlockVector->GetFrameInUseCount(),
13641 m_pBlockVector->GetBufferImageGranularity(),
13648 &dstAllocRequest) &&
13650 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
13652 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
13655 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
13656 (m_BytesMoved + size > maxBytesToMove))
13661 VmaDefragmentationMove move = {};
13662 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
13663 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
13664 move.srcOffset = srcOffset;
13665 move.dstOffset = dstAllocRequest.offset;
13667 move.hAllocation = allocInfo.m_hAllocation;
13668 move.pSrcBlock = pSrcBlockInfo->m_pBlock;
13669 move.pDstBlock = pDstBlockInfo->m_pBlock;
13671 moves.push_back(move);
13673 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
13677 allocInfo.m_hAllocation);
13679 if(freeOldAllocations)
13681 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
13682 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
13685 if(allocInfo.m_pChanged != VMA_NULL)
13687 *allocInfo.m_pChanged = VK_TRUE;
13690 ++m_AllocationsMoved;
13691 m_BytesMoved += size;
13693 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
13701 if(srcAllocIndex > 0)
13707 if(srcBlockIndex > 0)
13710 srcAllocIndex = SIZE_MAX;
13720 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
13723 for(size_t i = 0; i < m_Blocks.size(); ++i)
13725 if(m_Blocks[i]->m_HasNonMovableAllocations)
13733 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
13734 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13735 VkDeviceSize maxBytesToMove,
13736 uint32_t maxAllocationsToMove,
13739 if(!m_AllAllocations && m_AllocationCount == 0)
13744 const size_t blockCount = m_Blocks.size();
13745 for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13747 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
13749 if(m_AllAllocations)
13751 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
13752 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
13753 it != pMetadata->m_Suballocations.end();
13756 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
13758 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
13759 pBlockInfo->m_Allocations.push_back(allocInfo);
13764 pBlockInfo->CalcHasNonMovableAllocations();
13768 pBlockInfo->SortAllocationsByOffsetDescending();
13774 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
13777 const uint32_t roundCount = 2;
13780 VkResult result = VK_SUCCESS;
13781 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
13789 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
13790 size_t dstBlockIndex, VkDeviceSize dstOffset,
13791 size_t srcBlockIndex, VkDeviceSize srcOffset)
13793 if(dstBlockIndex < srcBlockIndex)
13797 if(dstBlockIndex > srcBlockIndex)
13801 if(dstOffset < srcOffset)
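// MoveMakesSense encodes the compaction invariant of the generic algorithm:
// a move is accepted only if it transports data "left", that is, to a lower
// block index, or within the same block to a lower offset. E.g. moving from
// (block 2, offset 0) to (block 0, offset 4096) passes, while any move to a
// higher block index is rejected.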
13811 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
13813 VmaBlockVector* pBlockVector,
13814 uint32_t currentFrameIndex,
13815 bool overlappingMoveSupported) :
13816 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
13817 m_OverlappingMoveSupported(overlappingMoveSupported),
13818 m_AllocationCount(0),
13819 m_AllAllocations(false),
13821 m_AllocationsMoved(0),
13822 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
13824 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
13828 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
13832 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
13833 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13834 VkDeviceSize maxBytesToMove,
13835 uint32_t maxAllocationsToMove,
13838 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
13840 const size_t blockCount = m_pBlockVector->GetBlockCount();
13841 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
13846 PreprocessMetadata();
13850 m_BlockInfos.resize(blockCount);
13851 for(size_t i = 0; i < blockCount; ++i)
13853 m_BlockInfos[i].origBlockIndex = i;
13856 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
13857 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
13858 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
13863 FreeSpaceDatabase freeSpaceDb;
13865 size_t dstBlockInfoIndex = 0;
13866 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13867 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13868 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13869 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
13870 VkDeviceSize dstOffset = 0;
13873 for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
13875 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
13876 VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
13877 VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
13878 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
13879 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
13881 VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
13882 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
13883 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
13884 if(m_AllocationsMoved == maxAllocationsToMove ||
13885 m_BytesMoved + srcAllocSize > maxBytesToMove)
13890 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
13892 VmaDefragmentationMove move = {};
13894 size_t freeSpaceInfoIndex;
13895 VkDeviceSize dstAllocOffset;
13896 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
13897 freeSpaceInfoIndex, dstAllocOffset))
13899 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
13900 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
13901 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
13904 if(freeSpaceInfoIndex == srcBlockInfoIndex)
13906 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13910 VmaSuballocation suballoc = *srcSuballocIt;
13911 suballoc.offset = dstAllocOffset;
13912 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
13913 m_BytesMoved += srcAllocSize;
13914 ++m_AllocationsMoved;
13916 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13918 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13919 srcSuballocIt = nextSuballocIt;
13921 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13923 move.srcBlockIndex = srcOrigBlockIndex;
13924 move.dstBlockIndex = freeSpaceOrigBlockIndex;
13925 move.srcOffset = srcAllocOffset;
13926 move.dstOffset = dstAllocOffset;
13927 move.size = srcAllocSize;
13929 moves.push_back(move);
13936 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
13938 VmaSuballocation suballoc = *srcSuballocIt;
13939 suballoc.offset = dstAllocOffset;
13940 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
13941 m_BytesMoved += srcAllocSize;
13942 ++m_AllocationsMoved;
13944 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13946 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13947 srcSuballocIt = nextSuballocIt;
13949 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13951 move.srcBlockIndex = srcOrigBlockIndex;
13952 move.dstBlockIndex = freeSpaceOrigBlockIndex;
13953 move.srcOffset = srcAllocOffset;
13954 move.dstOffset = dstAllocOffset;
13955 move.size = srcAllocSize;
13957 moves.push_back(move);
13962 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
13965 while(dstBlockInfoIndex < srcBlockInfoIndex &&
13966 dstAllocOffset + srcAllocSize > dstBlockSize)
13969 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
13971 ++dstBlockInfoIndex;
13972 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13973 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13974 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13975 dstBlockSize = pDstMetadata->GetSize();
13977 dstAllocOffset = 0;
13981 if(dstBlockInfoIndex == srcBlockInfoIndex)
13983 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13985 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
13987 bool skipOver = overlap;
13988 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
13992 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
13997 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
13999 dstOffset = srcAllocOffset + srcAllocSize;
14005 srcSuballocIt->offset = dstAllocOffset;
14006 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
14007 dstOffset = dstAllocOffset + srcAllocSize;
14008 m_BytesMoved += srcAllocSize;
14009 ++m_AllocationsMoved;
14012 move.srcBlockIndex = srcOrigBlockIndex;
14013 move.dstBlockIndex = dstOrigBlockIndex;
14014 move.srcOffset = srcAllocOffset;
14015 move.dstOffset = dstAllocOffset;
14016 move.size = srcAllocSize;
14018 moves.push_back(move);
14026 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
14027 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
14029 VmaSuballocation suballoc = *srcSuballocIt;
14030 suballoc.offset = dstAllocOffset;
14031 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
14032 dstOffset = dstAllocOffset + srcAllocSize;
14033 m_BytesMoved += srcAllocSize;
14034 ++m_AllocationsMoved;
14036 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
14038 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
14039 srcSuballocIt = nextSuballocIt;
14041 pDstMetadata->m_Suballocations.push_back(suballoc);
14043 move.srcBlockIndex = srcOrigBlockIndex;
14044 move.dstBlockIndex = dstOrigBlockIndex;
14045 move.srcOffset = srcAllocOffset;
14046 move.dstOffset = dstAllocOffset;
14047 move.size = srcAllocSize;
14049 moves.push_back(move);
14055 m_BlockInfos.clear();
14057 PostprocessMetadata();
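// The "fast" algorithm above is a single compaction sweep: blocks are sorted
// by free size, then each suballocation is either placed into a previously
// recorded gap (freeSpaceDb), shifted to a lower offset within its own block
// when the overlap is acceptable, or appended to the current destination
// block. It assumes VMA_DEBUG_MARGIN == 0 (asserted in the constructor) and
// generic block metadata, which is one reason Begin() selects it only in a
// restricted configuration.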
14062 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
14064 const size_t blockCount = m_pBlockVector->GetBlockCount();
14065 for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14067 VmaBlockMetadata_Generic* const pMetadata =
14068 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14069 pMetadata->m_FreeCount = 0;
14070 pMetadata->m_SumFreeSize = pMetadata->GetSize();
14071 pMetadata->m_FreeSuballocationsBySize.clear();
14072 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14073 it != pMetadata->m_Suballocations.end(); )
14075 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
14077 VmaSuballocationList::iterator nextIt = it;
14079 pMetadata->m_Suballocations.erase(it);
14090 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
14092 const size_t blockCount = m_pBlockVector->GetBlockCount();
14093 for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
14095 VmaBlockMetadata_Generic* const pMetadata =
14096 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
14097 const VkDeviceSize blockSize = pMetadata->GetSize();
14100 if(pMetadata->m_Suballocations.empty())
14102 pMetadata->m_FreeCount = 1;
14104 VmaSuballocation suballoc = {
14108 VMA_SUBALLOCATION_TYPE_FREE };
14109 pMetadata->m_Suballocations.push_back(suballoc);
14110 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
14115 VkDeviceSize offset = 0;
14116 VmaSuballocationList::iterator it;
14117 for(it = pMetadata->m_Suballocations.begin();
14118 it != pMetadata->m_Suballocations.end();
14121 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
14122 VMA_ASSERT(it->offset >= offset);
14125 if(it->offset > offset)
14127 ++pMetadata->m_FreeCount;
14128 const VkDeviceSize freeSize = it->offset - offset;
14129 VmaSuballocation suballoc = {
14133 VMA_SUBALLOCATION_TYPE_FREE };
14134 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
14135 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14137 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
14141 pMetadata->m_SumFreeSize -= it->size;
14142 offset = it->offset + it->size;
14146 if(offset < blockSize)
14148 ++pMetadata->m_FreeCount;
14149 const VkDeviceSize freeSize = blockSize - offset;
14150 VmaSuballocation suballoc = {
14154 VMA_SUBALLOCATION_TYPE_FREE };
14155 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
14156 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
14157 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
14159 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
14164 pMetadata->m_FreeSuballocationsBySize.begin(),
14165 pMetadata->m_FreeSuballocationsBySize.end(),
14166 VmaSuballocationItemSizeLess());
14169 VMA_HEAVY_ASSERT(pMetadata->Validate());
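// Pre/PostprocessMetadata bracket the fast pass: PreprocessMetadata strips
// all FREE suballocations so the lists contain only live allocations, and
// PostprocessMetadata rebuilds the free ranges from the gaps between the now
// compacted allocations, re-registers the large ones by size, restores
// m_SumFreeSize/m_FreeCount, and leaves the metadata valid again.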
14173 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
14176 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
14177 while(it != pMetadata->m_Suballocations.end())
14179 if(it->offset < suballoc.offset)
14184 pMetadata->m_Suballocations.insert(it, suballoc);
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
    defragmentationMovesProcessed(0),
    defragmentationMovesCommitted(0),
    hasDefragmentationPlan(0),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}
VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}
void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}
void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    /*
    Choice of the defragmentation algorithm. The fast algorithm is supported
    only when all of the following hold:
    - VMA_DEBUG_MARGIN is 0.
    - All allocations in this block vector are movable.
    - There is no possibility of an image/buffer granularity conflict.
    - The defragmentation is not incremental.
    */
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
        !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL))
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}
VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, const VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with algorithm other than default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    const VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentation contexts. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // A dedicated allocation cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // A lost allocation cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with algorithm other than default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to a default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
    {
        // For incremental defragmentation only record the limits here;
        // the actual moves are produced by the defragmentation passes.
        m_MaxCpuBytesToMove = maxCpuBytesToMove;
        m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;

        m_MaxGpuBytesToMove = maxGpuBytesToMove;
        m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;

        if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
            m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
        {
            return VK_NOT_READY;
        }
    }

    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats, flags,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats, flags,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}
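/*
Typical use of the non-incremental path above through the public API (a
sketch; the allocator handle and the allocations[]/allocCount variables are
assumed to exist in the calling code):

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = (uint32_t)allocCount;
    defragInfo.pAllocations = allocations;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
    // commandBuffer left as VK_NULL_HANDLE: GPU-side moves are disabled, as handled above.

    VmaDefragmentationContext defragCtx;
    VkResult res = vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
    if(res >= VK_SUCCESS)
    {
        vmaDefragmentationEnd(allocator, defragCtx);
    }
*/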
VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo)
{
    VmaDefragmentationPassMoveInfo* pCurrentMove = pInfo->pMoves;
    uint32_t movesLeft = pInfo->moveCount;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                pBlockVectorCtx->GetBlockVector()->Defragment(
                    pBlockVectorCtx,
                    m_pStats, m_Flags,
                    m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                    m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                    VK_NULL_HANDLE);

                if(pBlockVectorCtx->res < VK_SUCCESS)
                    continue;

                pBlockVectorCtx->hasDefragmentationPlan = true;
            }

            const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
                pBlockVectorCtx,
                pCurrentMove, movesLeft);

            movesLeft -= processed;
            pCurrentMove += processed;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                m_pStats, m_Flags,
                m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                VK_NULL_HANDLE);

            if(pBlockVectorCtx->res < VK_SUCCESS)
                continue;

            pBlockVectorCtx->hasDefragmentationPlan = true;
        }

        const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
            pBlockVectorCtx,
            pCurrentMove, movesLeft);

        movesLeft -= processed;
        pCurrentMove += processed;
    }

    pInfo->moveCount = pInfo->moveCount - movesLeft;

    return VK_SUCCESS;
}
VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
{
    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                res = VK_NOT_READY;
                continue;
            }

            pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
                pBlockVectorCtx, m_pStats);

            if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
                res = VK_NOT_READY;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            res = VK_NOT_READY;
            continue;
        }

        pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
            pBlockVectorCtx, m_pStats);

        if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
            res = VK_NOT_READY;
    }

    return res;
}
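/*
The plan/commit split above backs the incremental defragmentation API. A
sketch of the expected calling pattern (the defragCtx created with
VMA_DEFRAGMENTATION_FLAG_INCREMENTAL and the copy step are the caller's
responsibility):

    VmaDefragmentationPassMoveInfo moves[64];
    for(;;)
    {
        VmaDefragmentationPassInfo passInfo = {};
        passInfo.moveCount = 64;
        passInfo.pMoves = moves;
        VkResult res = vmaBeginDefragmentationPass(allocator, defragCtx, &passInfo);
        if(res == VK_SUCCESS)
            break; // Nothing left to move.
        // Copy passInfo.moveCount regions from their old to their new memory here...
        res = vmaEndDefragmentationPass(allocator, defragCtx); // Runs DefragmentPassEnd().
        if(res == VK_SUCCESS)
            break;
    }
*/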
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_Freq(INT64_MAX),
    m_StartCounter(INT64_MAX)
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write header. The second line is the version of the file format.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,8");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}
void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}
void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordMapMemory(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    PrintPointerList(info.poolCount, (const VmaAllocation*)info.pPools);
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
    Flush();
}
void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex, VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}
void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
    Flush();
}
VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            // If the string wasn't copied, record the pointer value instead.
            sprintf_s(m_PtrStr, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled,
    bool deviceCoherentMemoryExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
    outParams.threadId = GetCurrentThreadId();

    LARGE_INTEGER counter;
    QueryPerformanceCounter(&counter);
    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
}
void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}
void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
    m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
    m_UseAmdDeviceCoherentMemory((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT) != 0),
    m_UseKhrBufferDeviceAddress((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0),
    m_GlobalMemoryTypeBits(UINT32_MAX)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        // Vulkan 1.1 entry points are used directly, so the KHR extension flags become irrelevant.
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Needs to be a multiple of sizeof(uint32_t) because the corruption detection magic value is written as uint32_t.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device && pCreateInfo->instance);

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if !(VMA_BUFFER_DEVICE_ADDRESS)
    if(m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
    }
#endif
#if VMA_VULKAN_VERSION < 1002000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pUserData = pCreateInfo->pDeviceMemoryCallbacks->pUserData;
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0, // minBlockCount
            SIZE_MAX, // maxBlockCount
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call CreateMinBlocks here, because minBlockCount is 0.

        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}
VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget,
            m_UseAmdDeviceCoherentMemory);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
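/*
The heap size limit handled in the constructor above is supplied like this
(a sketch; physicalDevice/device/instance are assumed to exist in the calling
code, and the 256 MiB cap is an example value):

    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        heapSizeLimit[i] = VK_WHOLE_SIZE; // No limit by default.
    heapSizeLimit[0] = 256ull * 1024 * 1024; // Cap heap 0 at 256 MiB.

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.instance = instance;
    allocatorInfo.pHeapSizeLimit = heapSizeLimit;

    VmaAllocator allocator;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
*/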
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Static();
#endif

    if(pVulkanFunctions != VMA_NULL)
    {
        ImportVulkanFunctions_Custom(pVulkanFunctions);
    }

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Dynamic();
#endif

    ValidateVulkanFunctions();
}
#if VMA_STATIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Static()
{
    // Vulkan 1.0
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;

    // Vulkan 1.1
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
        m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
        m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
    }
#endif // #if VMA_VULKAN_VERSION >= 1001000
}

#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions)
{
    VMA_ASSERT(pVulkanFunctions != VMA_NULL);

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
    VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
    VMA_COPY_IF_NOT_NULL(vkFreeMemory);
    VMA_COPY_IF_NOT_NULL(vkMapMemory);
    VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
    VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
    VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
    VMA_COPY_IF_NOT_NULL(vkCreateImage);
    VMA_COPY_IF_NOT_NULL(vkDestroyImage);
    VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif

#if VMA_MEMORY_BUDGET
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif

#undef VMA_COPY_IF_NOT_NULL
}
#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
{
#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);

    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
    VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
    VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
    VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
    VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
    VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
    VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
    VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
    VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
    VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
    VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
    VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");

#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
    }
#endif

#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
    }
#endif // #if VMA_BIND_MEMORY2

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget && m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif // #if VMA_MEMORY_BUDGET

#undef VMA_FETCH_DEVICE_FUNC
#undef VMA_FETCH_INSTANCE_FUNC
}

#endif // #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
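/*
When both VMA_STATIC_VULKAN_FUNCTIONS and VMA_DYNAMIC_VULKAN_FUNCTIONS are
disabled, the application must supply the pointers imported above itself. A
partial sketch (every member asserted in ValidateVulkanFunctions() below must
be filled):

    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
    vulkanFunctions.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;
    vulkanFunctions.vkAllocateMemory = vkAllocateMemory;
    vulkanFunctions.vkFreeMemory = vkFreeMemory;
    // ... remaining members ...

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.pVulkanFunctions = &vulkanFunctions;
*/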
void VmaAllocator_T::ValidateVulkanFunctions()
{
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif

#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
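/*
Worked example: on a heap of 256 MiB (at or below the default
VMA_SMALL_HEAP_MAX_SIZE of 1 GiB), the preferred block size is
256 MiB / 8 = 32 MiB. On a larger heap it is m_PreferredLargeHeapBlockSize,
which defaults to VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 MiB) unless
overridden in VmaAllocatorCreateInfo. Either value is then aligned up to
32 bytes.
*/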
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristics: allocate dedicated memory if the requested size is greater than half of the preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        return AllocateDedicatedMemory(
            size, suballocType, memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer,
            dedicatedBufferUsage,
            dedicatedImage,
            allocationCount,
            pAllocations);
    }
    else
    {
        VkResult res = blockVector->Allocate(
            m_CurrentFrameIndex.load(),
            size, alignment, finalCreateInfo, suballocType,
            allocationCount, pAllocations);
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Block allocation failed: try dedicated memory as a fallback.
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }

        res = AllocateDedicatedMemory(
            size, suballocType, memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer,
            dedicatedBufferUsage,
            dedicatedImage,
            allocationCount,
            pAllocations);
        if(res == VK_SUCCESS)
        {
            // Succeeded: AllocateDedicatedMemory already filled pAllocations.
            VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
            return VK_SUCCESS;
        }
        // Everything failed: return the error code.
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

#if VMA_BUFFER_DEVICE_ADDRESS
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_UseKhrBufferDeviceAddress)
    {
        bool canContainBufferWithDeviceAddress = true;
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX || // Usage flags unknown.
                (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = false;
        }
        if(canContainBufferWithDeviceAddress)
        {
            allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
            VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
        }
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size, suballocType, memTypeIndex, allocInfo,
            map, isUserDataString, pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register the new allocations in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all allocations created so far.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            // No vkUnmapMemory needed: the spec allows skipping it before vkFreeMemory.
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfoForPool,
            suballocType,
            allocationCount,
            pAllocations);
    }
    else
    {
        // Bit mask of memory types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer,
                dedicatedBufferUsage,
                dedicatedImage,
                createInfo,
                memTypeIndex,
                suballocType,
                allocationCount,
                pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed. Try other compatible memory types.
            for(;;)
            {
                // Remove old memTypeIndex from the list of possibilities and find an alternative.
                memoryTypeBits &= ~(1u << memTypeIndex);
                res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                if(res != VK_SUCCESS)
                {
                    // No other matching memory type index could be found.
                    // Not returning res (VK_ERROR_FEATURE_NOT_PRESENT) because we already failed to allocate once.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }

                alignmentForMemType = VMA_MAX(
                    vkMemReq.alignment,
                    GetMemoryTypeMinAlignment(memTypeIndex));

                res = AllocateMemoryOfType(
                    vkMemReq.size,
                    alignmentForMemType,
                    requiresDedicatedAllocation || prefersDedicatedAllocation,
                    dedicatedBuffer,
                    dedicatedBufferUsage,
                    dedicatedImage,
                    createInfo,
                    memTypeIndex,
                    suballocType,
                    allocationCount,
                    pAllocations);
                if(res == VK_SUCCESS)
                {
                    return res;
                }
            }
        }
        // Can't find any single memory type matching the requirements.
        return res;
    }
}
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            // Do this regardless of whether the allocation is lost.
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}
VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and so it does nothing. It's left for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
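/*
Usage from the public API (a sketch; the allocator handle is assumed to exist
in the calling code):

    VmaStats stats;
    vmaCalculateStats(allocator, &stats);
    printf("Used bytes: %llu, unused bytes: %llu\n",
        (unsigned long long)stats.total.usedBytes,
        (unsigned long long)stats.total.unusedBytes);
*/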
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // Have to take MIN with heap size because an explicit HeapSizeLimit is included in it.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics.
        }
    }
}
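/*
Usage from the public API (a sketch): query the budget before a large
allocation to stay within the heap's estimated limit. heapIndex and
requiredBytes are assumed to exist in the calling code.

    VmaBudget budget[VK_MAX_MEMORY_HEAPS];
    vmaGetBudget(allocator, budget);
    if(budget[heapIndex].usage + requiredBytes <= budget[heapIndex].budget)
    {
        // Safe to allocate requiredBytes from this heap.
    }
*/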
static const uint32_t VMA_VENDOR_ID_AMD = 4098;

VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats, info.flags);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}
16364 VkResult VmaAllocator_T::DefragmentationEnd(
16367 vma_delete(
this, context);
16371 VkResult VmaAllocator_T::DefragmentationPassBegin(
16375 return context->DefragmentPassBegin(pInfo);
16377 VkResult VmaAllocator_T::DefragmentationPassEnd(
16380 return context->DefragmentPassEnd();
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        // Lock-free loop: retry until the last-use frame index is successfully brought up to date.
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                break;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // A stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif
        return true;
    }
}
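/*
Illustrative usage sketch (not part of the library): with allocations that can become
lost, a caller typically advances the frame index once per frame and touches an
allocation before using it. `allocator` and `alloc` are assumed to exist in user code.

    vmaSetCurrentFrameIndex(allocator, frameIndex);
    if(!vmaTouchAllocation(allocator, alloc))
    {
        // The allocation was lost - recreate the resource before use.
    }
*/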
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    // Memory type index out of range or forbidden.
    if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() ||
        ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}

void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}

void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}

void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData);
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}

void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}

VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    {
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        char* pBytes = VMA_NULL;
        VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
        if(res == VK_SUCCESS)
        {
            *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
            hAllocation->BlockAllocMap();
        }
        return res;
    }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    {
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        hAllocation->BlockAllocUnmap();
        pBlock->Unmap(this, 1);
        break;
    }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    {
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
        res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
        break;
    }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    {
        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
        VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
        res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
        break;
    }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
void VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize allocationSize = hAllocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;

        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
        memRange.memory = hAllocation->GetMemory();

        switch(hAllocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                memRange.size = allocationSize - memRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
                    allocationSize - memRange.offset);
            }
            break;

        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            // 1. Still within this allocation.
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                size = allocationSize - offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
            }
            memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);

            // 2. Adjust to whole block.
            const VkDeviceSize allocationOffset = hAllocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
            memRange.offset += allocationOffset;
            memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
            break;
        }

        default:
            VMA_ASSERT(0);
        }

        switch(op)
        {
        case VMA_CACHE_FLUSH:
            (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
}
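/*
Worked example of the rounding above (illustrative numbers): with
nonCoherentAtomSize = 64, offset = 70, size = 100,

    memRange.offset = VmaAlignDown(70, 64)            = 64
    memRange.size   = VmaAlignUp(100 + (70 - 64), 64) = VmaAlignUp(106, 64) = 128

so, before clamping to the allocation/block size, the flushed or invalidated range
is [64, 192), which covers the requested [70, 170) and satisfies the atom alignment
the Vulkan spec requires for non-coherent memory.
*/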
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}

uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create buffer.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // Query for supported memory types.
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // Destroy buffer.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}
uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
{
    // Make sure memory information is already fetched.
    VMA_ASSERT(GetMemoryTypeCount() > 0);

    uint32_t memoryTypeBits = UINT32_MAX;

    if(!m_UseAmdDeviceCoherentMemory)
    {
        // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD.
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
            {
                memoryTypeBits &= ~(1u << memTypeIndex);
            }
        }
    }

    return memoryTypeBits;
}
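/*
Worked example (illustrative): if the device reports 8 memory types and type 5 carries
VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD while m_UseAmdDeviceCoherentMemory is false,
the resulting mask is UINT32_MAX & ~(1u << 5) = 0xFFFFFFDF, so type 5 is filtered out
of every allocation made through this allocator.
*/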
#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    VmaPnextChainPushFront(&memProps, &budgetProps);

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();

            // Some bugged drivers return the budget incorrectly, e.g. 0 or much bigger than heap size.
            if(m_Budget.m_VulkanBudget[heapIndex] == 0)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
            }
            else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
            }
            if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
            {
                m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
            }
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}

uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();
            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }
            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo)
{
    VMA_ASSERT(allocator && pAllocatorInfo);
    pAllocatorInfo->instance = allocator->m_hInstance;
    pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice();
    pAllocatorInfo->device = allocator->m_hDevice;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}
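/*
Illustrative usage sketch (not part of the library): call once per rendered frame,
before any per-frame vmaTouchAllocation/vmaGetAllocationInfo calls. The counter below
is assumed to live in user code.

    static uint32_t frameIndex = 0;
    ++frameIndex; // must never reach UINT32_MAX, the reserved VMA_FRAME_INDEX_LOST value
    vmaSetCurrentFrameIndex(allocator, frameIndex);
*/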
VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
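/*
Illustrative usage sketch (not part of the library): pBudget must point to an array
with one VmaBudget per memory heap. A caller can use it to throttle streaming;
`heapIndex` and `nextUploadSize` are assumed to exist in user code.

    VmaBudget budgets[VK_MAX_MEMORY_HEAPS] = {};
    vmaGetBudget(allocator, budgets);
    if(budgets[heapIndex].usage + nextUploadSize > budgets[heapIndex].budget)
    {
        // Over budget - postpone the upload or free something first.
    }
*/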
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
                    {
                        json.WriteString("PROTECTED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_UNCACHED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
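/*
Illustrative usage sketch (not part of the library): the returned string is a JSON
document allocated through the allocator's callbacks, so it must be released with
vmaFreeStatsString, never with free() or delete.

    char* statsJson = VMA_NULL;
    vmaBuildStatsString(allocator, &statsJson, VK_TRUE); // VK_TRUE = include detailed map
    puts(statsJson);
    vmaFreeStatsString(allocator, statsJson);
*/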
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    // Avoid DEVICE_COHERENT unless explicitly requested.
    if(((pAllocationCreateInfo->requiredFlags | pAllocationCreateInfo->preferredFlags) &
        (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
    {
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Cost = number of preferred flags missing from this type
                // plus number of not-preferred flags it carries.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
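/*
Worked example of the cost metric above (illustrative): suppose
preferredFlags = DEVICE_LOCAL | HOST_VISIBLE, notPreferredFlags = 0, and a candidate
type has currFlags = DEVICE_LOCAL only. Then

    currCost = VmaCountBitsSet(preferredFlags & ~currFlags) = 1

(one preferred bit missing). A type carrying both bits would score 0 and be returned
immediately; among several admissible types the lowest score wins.
*/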
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}
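/*
Illustrative usage sketch (not part of the library): useful when pre-creating a
VmaPool - find the memory type a buffer of this description would use, then create
the pool on that type. The values below are placeholders chosen for the example.

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = 1024;
    bufInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    uint32_t memTypeIndex = 0;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
        allocator, &bufInfo, &allocInfo, &memTypeIndex);
*/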
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool && ppName);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
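/*
Illustrative usage sketch (not part of the library): allocating raw memory from
caller-supplied requirements, e.g. for a resource VMA did not create itself.
`device` and `buffer` are assumed to exist in user code.

    VkMemoryRequirements memReq = {};
    vkGetBufferMemoryRequirements(device, buffer, &memReq);
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VmaAllocationInfo allocInfo = {};
    VkResult res = vmaAllocateMemory(allocator, &memReq, &allocCreateInfo, &alloc, &allocInfo);
*/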
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}
VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
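/*
Illustrative usage sketch (not part of the library): Map/Unmap calls are reference
counted per memory block, so nesting is legal as long as every vmaMapMemory is paired
with a vmaUnmapMemory. `srcData` and `dataSize` are assumed to exist in user code.

    void* mapped = VMA_NULL;
    if(vmaMapMemory(allocator, alloc, &mapped) == VK_SUCCESS)
    {
        memcpy(mapped, srcData, (size_t)dataSize);
        vmaUnmapMemory(allocator, alloc);
    }
*/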
VMA_CALL_PRE void VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}
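/*
Illustrative usage sketch (not part of the library): required on HOST_VISIBLE but not
HOST_COHERENT memory. Flush after the CPU writes; invalidate before the CPU reads data
written by the GPU. Offset and size are relative to the allocation; VK_WHOLE_SIZE
covers the rest of it. `mapped`, `srcData`, `dstData` and `dataSize` are assumed to
exist in user code.

    memcpy(mapped, srcData, (size_t)dataSize);
    vmaFlushAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);

    vmaInvalidateAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
    memcpy(dstData, mapped, (size_t)dataSize);
*/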
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo *pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated wrapper: translates the legacy parameters to VmaDefragmentationInfo2
    // and runs vmaDefragmentationBegin / vmaDefragmentationEnd as one blocking call.
    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
    }
    else
    {
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
        info2.maxCpuAllocationsToMove = UINT32_MAX;
    }
    // No GPU defragmentation through this legacy entry point.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context,
    VmaDefragmentationPassInfo* pInfo)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(pInfo);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->moveCount, pInfo->pMoves));

    VMA_DEBUG_LOG("vmaBeginDefragmentationPass");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        pInfo->moveCount = 0;
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassBegin(pInfo, context);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaEndDefragmentationPass");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassEnd(context);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
        !allocator->m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            pBufferCreateInfo->usage, // dedicatedBufferUsage
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
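/*
Illustrative usage sketch (not part of the library): the common one-call path that
creates the buffer, allocates memory for it, and binds the two together. The sizes
and usage flags below are placeholders chosen for the example.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    VkBuffer buf = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, VMA_NULL);
    // ... use the buffer ...
    vmaDestroyBuffer(allocator, buf, alloc); // destroys the buffer and frees its memory
*/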
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            UINT32_MAX, // dedicatedBufferUsage
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
#endif // #ifdef VMA_IMPLEMENTATION