#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif

#if !defined(VMA_VULKAN_VERSION)
    #if defined(VK_VERSION_1_1)
        #define VMA_VULKAN_VERSION 1001000
    #else
        #define VMA_VULKAN_VERSION 1000000
    #endif
#endif

#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif

#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif

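// Note: VMA_CALL_PRE / VMA_CALL_POST above are customization points. Defining
// them before including this header decorates every public library function,
// which is useful e.g. for exporting the functions with __declspec(dllexport)
// when building VMA as a separate shared library.
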
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);

/** \brief Pointers to some Vulkan functions - a subset used by the library. */
typedef struct VmaVulkanFunctions
{
    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    PFN_vkAllocateMemory vkAllocateMemory;
    PFN_vkFreeMemory vkFreeMemory;
    PFN_vkMapMemory vkMapMemory;
    PFN_vkUnmapMemory vkUnmapMemory;
    PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
    PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
    PFN_vkBindBufferMemory vkBindBufferMemory;
    PFN_vkBindImageMemory vkBindImageMemory;
    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    PFN_vkCreateBuffer vkCreateBuffer;
    PFN_vkDestroyBuffer vkDestroyBuffer;
    PFN_vkCreateImage vkCreateImage;
    PFN_vkDestroyImage vkDestroyImage;
    PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
} VmaVulkanFunctions;

VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);

#ifndef VMA_STATS_STRING_ENABLED
#define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as string in JSON format.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <cstring>

// If not defined by the user, fetch pointers to Vulkan functions statically,
// like: vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

// Set VMA_USE_STL_CONTAINERS to 1 to make the library use STL containers
// (std::pair, std::vector, std::list, std::unordered_map) instead of its own
// implementations.
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus
    // Otherwise it is always 199711L, even though shared_mutex has worked since Visual Studio 2015 Update 2.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif

#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

// The following headers are used in this CONFIGURATION section only, as a
// backup when the custom implementations are not enabled.
#include <cassert> // for assert
#include <algorithm> // for min, max
#include <mutex>

#ifndef VMA_NULL
    // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    #define VMA_NULL nullptr
#endif

#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif

// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef _DEBUG
        #define VMA_ASSERT(expr) assert(expr)
    #else
        #define VMA_ASSERT(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef _DEBUG
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)
    #endif
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif

// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif

#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        // Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: use a normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX

#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation will have its own memory block. Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes. Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes. Set nonzero for debugging purposes only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define this macro to 1 to automatically fill new allocations and destroyed allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to write and
    // validate a magic value in the margins, detecting out-of-bounds writes.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Set this to 1 for debugging purposes only, to enable a single mutex protecting all entry calls to the library.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity. Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif

#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
        className(const className&) = delete; \
        className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN, little-endian binary 66 E6 84 7F.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };

// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}

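// The function above is the classic SWAR ("SIMD within a register") popcount:
// each step folds bit counts into progressively wider fields (2, 4, 8, 16,
// then 32 bits wide), so the result needs no loop and no lookup table.
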
// Aligns given value up to nearest multiple of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Aligns given value down to nearest multiple of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}

// Returns true if given number is a power of two.
// T must be unsigned integer number, or signed but always nonnegative.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}

// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}

static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}

#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT

/*
Returns true if two memory blocks occupy the same VkDeviceMemory page.
Assumes they are placed consecutively: resourceA at a lower offset than resourceB.
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}

enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};

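// Note: masking with ~(pageSize - 1) in VmaBlocksOnSamePage assumes pageSize
// is a power of two (as bufferImageGranularity is in practice); it rounds each
// address down to the start of its page, so comparing the two rounded values
// answers "does A end on the same page where B begins?".
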
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
or linear image and the other one is optimal image. If type is unknown, behave
conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}

static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#endif
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}

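// With VMA_DEBUG_MARGIN > 0 and VMA_DEBUG_DETECT_CORRUPTION enabled, every
// allocation is surrounded by margins filled with the magic value by
// VmaWriteMagicValue(); a later VmaValidateMagicValue() failure therefore
// pinpoints an out-of-bounds write by the application.
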
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
}

// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;

/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection or the place
where a new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}

template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}

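// VmaBinaryFindFirstNotLess is the hand-rolled equivalent of std::lower_bound;
// VmaBinaryFindSorted then treats an element as "equal" to the key when
// neither compares less than the other - the same equivalence definition that
// std::binary_search uses.
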
/*
Returns true if all pointers in the array are not-null and unique.
Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
T must be pointer type, e.g. VmaAllocation, VmaPool.
*/
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}

////////////////////////////////////////////////////////////////////////////////
// Memory allocation

static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}

template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)

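// Design note: vma_new / vma_new_array pair placement-new with the
// VkAllocationCallbacks-aware VmaMalloc, so objects are constructed in
// caller-provided memory; vma_delete / vma_delete_array below undo both steps
// by invoking destructors explicitly before VmaFree.
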
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}

static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}

static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}

// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};

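// This is the minimal allocator interface C++11 containers require:
// value_type, allocate/deallocate, a converting constructor, and the equality
// operators. It routes every container allocation through the caller's
// VkAllocationCallbacks when the STL fallbacks below are enabled.
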
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
    // value is unused.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};

template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR

template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}

////////////////////////////////////////////////////////////////////////////////
// class VmaPoolAllocator

/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
the allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            new(result)T(); // Explicit constructor call.
            return result;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    new(result)T(); // Explicit constructor call.
    return result;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            ptr->~T(); // Explicit destructor call.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}

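// Design note: each ItemBlock threads an intrusive singly-linked free list
// through its unused slots (NextFreeIndex), making Alloc() and the in-block
// part of Free() O(1). Block capacities grow geometrically (* 3 / 2), so the
// linear block search in Free() stays short even for many items.
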
////////////////////////////////////////////////////////////////////////////////
// class VmaRawList, VmaList

#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};

template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST

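// Note on the iterator design above: end() is represented by a null m_pItem,
// which is why operator--() treats null as one-past-the-back and steps to
// m_pList->Back(). The stored list pointer exists to support exactly that
// case, plus the debug assertions.
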
////////////////////////////////////////////////////////////////////////////////
// class VmaMap

#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP

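// Note: unlike std::unordered_map, the fallback VmaMap is a flat map - a
// vector of pairs kept sorted by key - so find() and insert() use binary
// search (O(log n)) and erase() shifts the tail of the vector.
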
////////////////////////////////////////////////////////////////////////////////

class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };

struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    // This struct is allocated using VmaPoolAllocator.

    void Ctor(uint32_t currentFrameIndex, bool userDataString)
    {
        m_Alignment = 1;
        m_Size = 0;
        m_MemoryTypeIndex = 0;
        m_pUserData = VMA_NULL;
        m_LastUseFrameIndex = currentFrameIndex;
        m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
        m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
        m_MapCount = 0;
        m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;

#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    void Dtor()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    If the allocation has not been used for more than frameInUseCount frames,
    makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and
    returns true; otherwise returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif
};

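// Design note: BlockAllocation and DedicatedAllocation overlap in the
// anonymous union above, with m_Type recording which member is active. This
// keeps VmaAllocation_T compact, which matters because instances are created
// in bulk through VmaPoolAllocator.
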
/*
Represents a region of a VmaDeviceMemoryBlock that is either assigned and
returned as an allocated memory block, or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparator for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

/*
Parameters of a planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};

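// CalcCost() expresses "how destructive would this placement be" in bytes:
// the total size of allocations that would have to be made lost, plus a flat
// VMA_LOST_ALLOCATION_COST (1 MiB) penalty per lost allocation, so candidate
// requests can be compared by a single number.
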
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only a single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    // Shouldn't modify blockCount.
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for an allocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags.
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request has to be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};

#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)

class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    ////////////////////////////////////////////////////////////////////////////////
    // For defragmentation

    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};

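// VmaBlockMetadata_Generic is the default algorithm: suballocations live in a
// list ordered by offset, while m_FreeSuballocationsBySize keeps iterators to
// the larger free ranges sorted by size, so a best-fit search is a binary
// search over that vector rather than a walk of the whole list.
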
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    /*
    There are two suballocation vectors, used in ping-pong way.
    The one with index m_1stVectorIndex is called 1st.
    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
    2nd can be non-empty only when 1st is not empty.
    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
    */
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        /*
        Suballocations in 2nd vector are created later than the ones in 1st, but they
        all have smaller offsets.
        */
        SECOND_VECTOR_RING_BUFFER,
        /*
        Suballocations in 2nd vector are upper side of double stack.
        They all have offsets higher than those in 1st vector.
        Top of this stack means smaller offsets, but higher indices in this vector.
        */
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};

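// This metadata backs VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT. Depending on the
// allocation/free pattern, the same two vectors behave as a simple stack
// (2nd vector empty), a double stack (SECOND_VECTOR_DOUBLE_STACK), or a ring
// buffer (SECOND_VECTOR_RING_BUFFER).
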
/*
- GetSize() is the original size of the allocated memory block.
- m_UsableSize is this size aligned down to a power of two.
  All allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them.
  It is reported as a separate, unused range, not available for allocations.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    struct {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    // node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    // node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};

/*
Represents a single block of device memory (VkDeviceMemory) with all the
data about its regions (aka suballocations, VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    VkResult CheckCorruption(VmaAllocator hAllocator);

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if it does not belong to a custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it is not used by multiple threads simultaneously,
    e.g. vkMapMemory, vkBindBufferMemory. Also protects m_MapCount, m_pMappedData.
    Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};

struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
};

class VmaDefragmentationAlgorithm;

/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a
specific Vulkan memory type.

Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    void GetPoolStats(VmaPoolStats* pStats);

    bool IsEmpty() const { return m_Blocks.empty(); }
    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void Free(const VmaAllocation hAllocation);

    // Adds statistics of this BlockVector to pStats.
    void AddStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    // Saves results in pCtx->res.
    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats);

    ////////////////////////////////////////////////////////////////////////////////
    // To be used only while the m_Mutex is locked. Used during defragmentation.

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    /* There can be at most one allocation that is completely empty (except when minBlockCount > 0) -
    a hysteresis to avoid the pessimistic case of alternating creation and destruction of a VkDeviceMemory. */
    bool m_HasEmptyBlock;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // To be used only without CAN_MAKE_OTHER_LOST flag.
    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    // Used during defragmentation. pDefragmentationStats is optional; it is in/out, updated with new data.
    void FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats);

    void UpdateHasEmptyBlock();
};

struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    // ... (tail of the constructor parameter list)
        VkDeviceSize preferredBlockSize);
    // ...

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
    // ...
#endif

    // ...
};
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        /* ... */
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
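/*
One plausible reading of MoveMakesSense(), consistent with how the generic
algorithm uses (dstBlockIndex, dstOffset, srcBlockIndex, srcOffset): a move
only helps if the data travels strictly toward lower block indices, or toward
a lower offset within the same block. A hedged sketch (the real body lies
outside this excerpt):

    #include <cstdint>
    #include <cstddef>

    static bool MoveMakesSenseSketch(size_t dstBlockIndex, uint64_t dstOffset,
                                     size_t srcBlockIndex, uint64_t srcOffset)
    {
        if(dstBlockIndex < srcBlockIndex)
        {
            return true;    // moving toward the front of the block list
        }
        if(dstBlockIndex > srcBlockIndex)
        {
            return false;   // moving backward would increase fragmentation
        }
        return dstOffset < srcOffset;   // same block: only move toward offset 0
    }
*/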
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        /* ... */
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first null or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                // Leave this structure for remaining empty space.
                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    // This structure becomes invalid.
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }

                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };
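/*
Fetch() above relies on VmaAlignUp() to place a candidate allocation at the
first suitably aligned offset inside a recorded hole. A self-contained sketch
of that arithmetic (illustrative only; assumes power-of-two alignment, as
Vulkan guarantees for memory requirements):

    #include <cstdint>

    static uint64_t AlignUpSketch(uint64_t offset, uint64_t alignment)
    {
        return (offset + alignment - 1) & ~(alignment - 1);
    }

    // Placing a 64-byte allocation into a hole [100, 400) with alignment 128:
    // AlignUpSketch(100, 128) == 128, and 128 + 64 <= 400, so it fits with
    // 400 - (128 + 64) == 208 bytes of free space left after it.
*/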
    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    // ...
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    // ...
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;

    VmaBlockVectorDefragmentationContext(
        /* ... */
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

    void Begin(bool overlappingMoveSupported);

private:
    // ...
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    // ...
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        /* ... */
        uint32_t currFrameIndex,
        /* ... */);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        /* ... */
        VkBool32* pAllocationsChanged);

    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        /* ... */);

private:
    // ...
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    // ...
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // ...
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    // ...
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled);
    // ...

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        /* ... */);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        /* ... */);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        /* ... */
        uint64_t allocationCount,
        /* ... */);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        /* ... */);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        /* ... */);
    void RecordFreeMemory(uint32_t frameIndex,
        /* ... */);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        /* ... */);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        /* ... */
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        /* ... */);
    void RecordMapMemory(uint32_t frameIndex,
        /* ... */);
    void RecordUnmapMemory(uint32_t frameIndex,
        /* ... */);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        /* ... */);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        /* ... */);
    void RecordDestroyBuffer(uint32_t frameIndex,
        /* ... */);
    void RecordDestroyImage(uint32_t frameIndex,
        /* ... */);
    void RecordTouchAllocation(uint32_t frameIndex,
        /* ... */);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        /* ... */);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        /* ... */);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        /* ... */);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        /* ... */);
    void RecordSetPoolName(uint32_t frameIndex,
        /* ... */);

private:
    // ...
    class UserDataString
    {
    public:
        // ...
        const char* GetString() const { return m_Str; }
        // ...
    };

    // ...
    VMA_MUTEX m_FileMutex;
    // ...
    int64_t m_StartCounter;
    // ...

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }
    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
    // ...
};

#endif // #if VMA_RECORDING_ENABLED
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    // ...
private:
    // ...
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};

struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
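/*
m_BlockBytes tracks VkDeviceMemory actually allocated per heap, while
m_AllocationBytes tracks the portion handed out to allocations; between budget
fetches the allocator can extrapolate current heap usage from the last fetched
numbers. A hedged standalone sketch of that bookkeeping (the member names
above are real; the function below is illustrative only):

    #include <cstdint>

    static uint64_t EstimateHeapUsageSketch(uint64_t vulkanUsageAtFetch,
                                            uint64_t blockBytesNow,
                                            uint64_t blockBytesAtFetch)
    {
        // Start from what the driver reported, then apply the block bytes
        // the allocator itself allocated or freed since that snapshot.
        if(blockBytesNow >= blockBytesAtFetch)
        {
            return vulkanUsageAtFetch + (blockBytesNow - blockBytesAtFetch);
        }
        const uint64_t freedSinceFetch = blockBytesAtFetch - blockBytesNow;
        return vulkanUsageAtFetch > freedSinceFetch ? vulkanUsageAtFetch - freedSinceFetch : 0;
    }
*/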
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    // ...
    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation;
    bool m_UseKhrBindMemory2;
    bool m_UseExtMemoryBudget;
    // ...
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    // ...
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // ...
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    // ...

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        /* ... */
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        /* ... */
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        /* ... */
        VmaSuballocationType suballocType,
        size_t allocationCount,
        /* ... */);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        /* ... */);

    VkResult ResizeAllocation(
        /* ... */
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(
        VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        /* ... */);
    VkResult DefragmentationEnd(
        /* ... */);

    // ...
    void DestroyPool(VmaPool pool);
    // ...

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        /* ... */
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    // ...

    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        /* ... */);
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        /* ... */);

    // ...

    VkResult BindBufferMemory(
        /* ... */
        VkDeviceSize allocationLocalOffset,
        /* ... */);
    VkResult BindImageMemory(
        /* ... */
        VkDeviceSize allocationLocalOffset,
        /* ... */);

    void FlushOrInvalidateAllocation(
        /* ... */
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    // ...
    uint32_t GetGpuDefragmentationMemoryTypeBits();

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    // ...

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

    // ...
    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        /* ... */
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        /* ... */
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        /* ... */);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        /* ... */
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        /* ... */
        bool isUserDataString,
        /* ... */);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        /* ... */
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        /* ... */
        bool isUserDataString,
        /* ... */
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        size_t allocationCount,
        /* ... */);

    // ...
    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
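/*
GetMemoryTypeMinAlignment() above widens the minimum alignment to
nonCoherentAtomSize for memory that is HOST_VISIBLE but not HOST_COHERENT,
because Vulkan requires flush/invalidate ranges to be multiples of that atom.
A standalone sketch of the rounding FlushOrInvalidateAllocation() has to
perform before calling vkFlushMappedMemoryRanges (illustrative only; clamping
to the allocation's end is omitted):

    #include <cstdint>

    // Expand [offset, offset + size) so both ends land on atomSize boundaries.
    static void RoundToNonCoherentAtomsSketch(uint64_t& offset, uint64_t& size, uint64_t atomSize)
    {
        const uint64_t end = offset + size;
        offset = offset - (offset % atomSize);                                 // round begin down
        const uint64_t roundedEnd = ((end + atomSize - 1) / atomSize) * atomSize; // round end up
        size = roundedEnd - offset;
    }
*/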
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
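/*
These helpers rebuild new/delete on top of the user-supplied allocation
callbacks: raw storage from VmaMalloc, explicit destructor calls, then
VmaFree. The same pattern in plain standard C++, with malloc/free standing in
for VkAllocationCallbacks (illustrative only; Foo/MakeFoo/DestroyFoo are
hypothetical names):

    #include <cstdlib>
    #include <new>

    struct Foo { int x; explicit Foo(int v) : x(v) {} };

    static Foo* MakeFoo(int v)
    {
        void* mem = std::malloc(sizeof(Foo)); // raw storage (like VmaAllocate<Foo>)
        return mem ? new(mem) Foo(v) : nullptr; // construct in place
    }

    static void DestroyFoo(Foo* p)
    {
        if(p != nullptr)
        {
            p->~Foo();      // explicit destructor call, like vma_delete
            std::free(p);   // release raw storage, like VmaFree
        }
    }
*/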
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char* p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char* p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
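/*
A hedged usage sketch of the builder above (illustrative only; assumes an
initialized VmaAllocator `hAllocator` and VMA_STATS_STRING_ENABLED != 0):

    VmaStringBuilder sb(hAllocator);
    sb.Add("Heap count: ");
    sb.AddNumber(2u);        // selects the uint32_t overload
    sb.AddNewLine();
    // sb.GetData()/GetLength() expose the accumulated characters; note the
    // buffer is not null-terminated unless a '\0' is appended explicitly.
*/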
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        // (per-character escaping of '"', '\\' and control characters elided;
        // unsupported characters fall through to this assert)
        VMA_ASSERT(0 && "Character not currently supported.");
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(':'); m_SB.Add(' ');
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(','); m_SB.Add(' ');
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
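/*
BeginValue() above enforces JSON well-formedness structurally: inside an
object, even valueCount positions must be strings (keys) and odd positions are
their values. A hedged usage sketch of the resulting call sequence
(illustrative only; assumes an initialized VmaAllocator `hAllocator` and
VMA_STATS_STRING_ENABLED != 0):

    VmaStringBuilder sb(hAllocator);
    VmaJsonWriter json(hAllocator->GetAllocationCallbacks(), sb);
    json.BeginObject();
    json.WriteString("Blocks");   // key: the parity check requires a string here
    json.WriteNumber(3u);         // value
    json.WriteString("Empty");
    json.WriteBool(false);
    json.EndObject();
    // sb now holds {"Blocks": 3, "Empty": false}, modulo indentation.
*/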
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
        {
            ++mapRefCount;
        }
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    // Compare-exchange loop: retry until the last-use frame index is either
    // proven too recent or successfully replaced with VMA_FRAME_INDEX_LOST.
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                return true;
            }
            else // Failed to compare-exchange: reload and retry.
            {
                localLastUseFrameIndex = GetLastUseFrameIndex();
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED

// Correspondence with values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    /* ... one name per VmaSuballocationType value ... */
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}

void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
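/*
The heterogeneous overload above (iterator vs. plain VkDeviceSize) is what
lets the allocator binary-search m_FreeSuballocationsBySize for the first free
range of at least a given size, via VmaBinaryFindFirstNotLess later in this
file. The same query over a std::vector (self-contained, illustrative only):

    #include <algorithm>
    #include <vector>
    #include <cstdint>

    // First index whose size is >= requestedSize in a vector sorted ascending
    // by size; equals sortedSizes.size() when nothing fits.
    static size_t FindFirstFitSketch(const std::vector<uint64_t>& sortedSizes, uint64_t requestedSize)
    {
        return std::lower_bound(sortedSizes.begin(), sortedSizes.end(), requestedSize)
            - sortedSizes.begin();
    }
*/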
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}

bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of the next suballocation, accumulated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations, counted while traversing the list.
    uint32_t calculatedFreeCount = 0;
    // Expected total size of free suballocations, summed while traversing the list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in m_FreeSuballocationsBySize.
    size_t freeSuballocationsToRegister = 0;
    // True if the previously visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation must match the expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid: they should have been merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Margin required between allocations: every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // Margin required between allocations: the previous suballocation must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize must match.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations may be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match the calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}
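/*
Validate() recomputes every cached total from scratch and compares: the
suballocations must tile the block exactly, and m_FreeCount / m_SumFreeSize
must equal what a fresh traversal yields. A toy model of the tiling invariant
over a plain array (self-contained, illustrative only):

    #include <cstdint>
    #include <cstddef>

    struct Range { uint64_t offset, size; };

    // True when ranges tile [0, blockSize) with no gaps or overlaps.
    static bool TilesExactlySketch(const Range* r, size_t count, uint64_t blockSize)
    {
        uint64_t expectedOffset = 0;
        for(size_t i = 0; i < count; ++i)
        {
            if(r[i].offset != expectedOffset)
            {
                return false;   // gap or overlap detected
            }
            expectedOffset += r[i].size;
        }
        return expectedOffset == blockSize;
    }
*/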
VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}

void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;

    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request:
    // early return false.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    // Search m_FreeSuballocationsBySize.
    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    it,
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search starting from the biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force search over all suballocations, free or potentially lost.
        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    suballocIt,
                    true, // canMakeOtherLost
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}

uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}

VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }
    return VK_SUCCESS;
}
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert a new free suballocation after the current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert a new free suballocation before the current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}

void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from the offset equal to the beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on the previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that the final *pOffset is known, check if we are past suballocItem.
        // If yes, return false: this function should be called for another suballocItem as the starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on the current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate the required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early-return check.
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until the desired size is reached.
        // Update itemsToMakeLostCount along the way.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, more allocations must be marked lost, or the request fails.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on the next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from the offset equal to the beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on the previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on the current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate the required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if the requested size plus margins does not fit in this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, the allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on the next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: success. *pOffset is already filled.
    return true;
}
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}

VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }
}

void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item->size,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty())
    {
        return false;
    }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it)
    {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
        {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
            {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}
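/*
Editor's note: this predicate is a conservative pre-check. It scans all used
suballocations in the block, tracking the minimum allocation alignment and
whether any adjacent pair mixes linear and non-linear resource types
(VmaIsBufferImageGranularityConflict). The result tells callers - notably
defragmentation - whether bufferImageGranularity has to be taken into account
when recomputing offsets for this block, or whether per-pair checks can be
skipped entirely.
*/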
VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_SumFreeSize(0),
    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_1stVectorIndex(0),
    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
    m_1stNullItemsBeginCount(0),
    m_1stNullItemsMiddleCount(0),
    m_2ndNullItemsCount(0)
{
}

VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
{
}

void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}
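/*
Editor's note - a summary (following the library's own documentation of the
linear algorithm) of how this metadata organizes a block:

- 1st vector: allocations packed from the beginning of the block in increasing
  offset order. With SECOND_VECTOR_EMPTY only this vector is in use - a simple
  stack.
- SECOND_VECTOR_RING_BUFFER: when allocations are freed from the front of 1st,
  new allocations wrap around to offset 0 and grow toward the beginning of 1st,
  forming a ring buffer.
- SECOND_VECTOR_DOUBLE_STACK: allocations made with
  VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT grow downward from the end of the
  block, forming a second stack.

    | 1st -> .......................................... <- 2nd (double stack) |
    | 2nd (ring) -> .............. 1st -> .................................... |
*/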
bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty())
    {
        // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty())
    {
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i)
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }
        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree)
        {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else
        {
            ++nullItem1stCount;
        }

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; )
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }
        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}
VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();

    /*
    We don't consider gaps inside allocation vectors with freed allocations because
    they are not suitable for reuse in linear allocator. We consider only space that
    is available for new allocations.
    */
    if(IsEmpty())
    {
        return size;
    }

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode)
    {
    case SECOND_VECTOR_EMPTY:
        /*
        Available space is after end of 1st, as well as before beginning of 1st (which
        would make it a ring buffer).
        */
        {
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }

    case SECOND_VECTOR_RING_BUFFER:
        /*
        Available space is only between end of 2nd and beginning of 1st.
        */
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }

    case SECOND_VECTOR_DOUBLE_STACK:
        /*
        Available space is only between end of 1st and top of 2nd.
        */
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }

    default:
        VMA_ASSERT(0);
        return 0;
    }
}
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    outInfo.blockCount = 1;
    outInfo.allocationCount = (uint32_t)GetAllocationCount();
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.unusedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                // Free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                // We are at the end; trailing free space.
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            // Free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }
            outInfo.usedBytes += suballoc.size;
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            // We are at the end; trailing free space.
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                // Free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                // We are at the end; trailing free space.
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    outInfo.unusedBytes = size - outInfo.usedBytes;
}
void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                // Free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                ++inoutStats.allocationCount;
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                // We are at the end; trailing free space.
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            // Free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }
            ++inoutStats.allocationCount;
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            // We are at the end; trailing free space.
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                // Free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                ++inoutStats.allocationCount;
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                // We are at the end; trailing free space.
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: calculate overall statistics.

    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;

    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }
                ++alloc2ndCount;
                usedBytes += suballoc.size;
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    ++unusedRangeCount;
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset)
            {
                ++unusedRangeCount;
            }
            ++alloc1stCount;
            usedBytes += suballoc.size;
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < size)
            {
                ++unusedRangeCount;
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }
                ++alloc2ndCount;
                usedBytes += suballoc.size;
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    ++unusedRangeCount;
                }
                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
    // SECOND PASS: write the actual entries.
    lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED
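/*
Editor's note - an illustrative sketch (not part of the original source) of how
this JSON dump is typically obtained through the public API:

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // detailedMap = VK_TRUE
    // statsString now holds a JSON document that includes the per-block
    // detailed maps written by PrintDetailedMap() above; save it to a file or
    // feed it to the VmaDumpVis tool for visualization.
    vmaFreeStatsString(allocator, statsString);
*/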
bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());
    return upperAddress ?
        CreateAllocationRequest_UpperAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
        CreateAllocationRequest_LowerAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}
bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
        return false;
    }

    // Try to allocate before 2nd.back(), or end of block if 2nd.empty().
    if(allocSize > size)
    {
        return false;
    }
    VkDeviceSize resultBaseOffset = size - allocSize;
    if(!suballocations2nd.empty())
    {
        const VmaSuballocation& lastSuballoc = suballocations2nd.back();
        resultBaseOffset = lastSuballoc.offset - allocSize;
        if(allocSize > lastSuballoc.offset)
        {
            return false;
        }
    }

    // Start from offset equal to end of free space.
    VkDeviceSize resultOffset = resultBaseOffset;

    // Apply VMA_DEBUG_MARGIN at the end.
    if(VMA_DEBUG_MARGIN > 0)
    {
        if(resultOffset < VMA_DEBUG_MARGIN)
        {
            return false;
        }
        resultOffset -= VMA_DEBUG_MARGIN;
    }

    // Apply alignment.
    resultOffset = VmaAlignDown(resultOffset, allocAlignment);

    // Check next suballocations from 2nd for BufferImageGranularity conflicts.
    // Make bigger alignment if necessary.
    if(bufferImageGranularity > 1 && !suballocations2nd.empty())
    {
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
        {
            const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
            if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
            {
                if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
                {
                    bufferImageGranularityConflict = true;
                    break;
                }
            }
            else
            {
                // Already on previous page.
                break;
            }
        }
        if(bufferImageGranularityConflict)
        {
            resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
        }
    }

    // There is enough free space.
    const VkDeviceSize endOf1st = !suballocations1st.empty() ?
        suballocations1st.back().offset + suballocations1st.back().size :
        0;
    if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
    {
        // Check previous suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
            }
        }

        // All tests passed: Success.
        pAllocationRequest->offset = resultOffset;
        pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
        pAllocationRequest->sumItemSize = 0;
        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
        return true;
    }

    return false;
}
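/*
Editor's note - an illustrative usage sketch (not part of the original source)
showing how a request reaches CreateAllocationRequest_UpperAddress: a custom
pool created with VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT plus an allocation using
VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT. Error handling is omitted; memTypeIndex
and bufCreateInfo are assumed to be prepared by the caller.

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex; // e.g. from vmaFindMemoryTypeIndex().
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    poolCreateInfo.blockSize = 64ull * 1024 * 1024;
    poolCreateInfo.maxBlockCount = 1;

    VmaPool pool;
    vmaCreatePool(allocator, &poolCreateInfo, &pool);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT; // Allocate from the top (2nd stack).

    VkBuffer buf;
    VmaAllocation alloc;
    vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, VMA_NULL);
*/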
bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        // Try to allocate at the end of 1st vector.

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations1st.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
            {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
                {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on previous page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }

    // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
    // beginning of 1st vector as the end of free space.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost)
        {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
            {
                // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    // No problem.
                }
                else
                {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else
                    {
                        return false;
                    }
                }
                ++index1st;
            }

            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, we must mark more allocations lost or fail.
            if(bufferImageGranularity > 1)
            {
                while(index1st < suballocations1st.size())
                {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
                    {
                        if(suballoc.hAllocation != VK_NULL_HANDLE)
                        {
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                            {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else
                            {
                                return false;
                            }
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                    ++index1st;
                }
            }

            // Special case: There is not enough room at the end for this allocation,
            // even after making all colliding allocations from the 1st vector lost.
            if(index1st == suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
            {
                VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
                return false;
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1)
            {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++)
                {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
            return true;
        }
    }

    return false;
}
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0)
    {
        return true;
    }

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    // We always start from 1st.
    SuballocationVectorType* suballocations = &AccessSuballocations1st();
    size_t index = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
    {
        if(index == suballocations->size())
        {
            index = 0;
            // If we get to the end of 1st, we wrap around to beginning of 2nd.
            if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                suballocations = &AccessSuballocations2nd();
            }
            // else: for SECOND_VECTOR_EMPTY, suballocations continues pointing at 1st.
            VMA_ASSERT(!suballocations->empty());
        }
        VmaSuballocation& suballoc = (*suballocations)[index];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                if(suballocations == &AccessSuballocations1st())
                {
                    ++m_1stNullItemsMiddleCount;
                }
                else
                {
                    ++m_2ndNullItemsCount;
                }
                ++madeLostCount;
            }
            else
            {
                return false;
            }
        }
        ++index;
    }

    CleanupAfterFree();

    return true;
}
uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount)
    {
        CleanupAfterFree();
    }

    return lostAllocationCount;
}
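/*
Editor's note - an illustrative sketch (not part of the original source) of the
"lost allocations" mechanism these functions implement. An allocation created
with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT may be reclaimed once it has not
been used for more than frameInUseCount frames:

    // Once per frame:
    vmaSetCurrentFrameIndex(allocator, frameIndex);

    // Before using a lost-enabled allocation in this frame:
    if(vmaTouchAllocation(allocator, alloc) == VK_FALSE)
    {
        // The allocation was lost - its memory was reused; recreate the resource.
    }
*/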
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
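/*
Editor's note - an illustrative configuration sketch (not part of the original
source) for exercising this corruption check. With a non-zero margin and
corruption detection enabled, the allocator writes a magic number into the
margin around every allocation and the check above validates it:

    #define VMA_DEBUG_MARGIN 16
    #define VMA_DEBUG_DETECT_CORRUPTION 1
    #define VMA_IMPLEMENTATION
    #include "vk_mem_alloc.h"

    // Later, for memory types that are HOST_VISIBLE:
    VkResult res = vmaCheckCorruption(allocator, UINT32_MAX); // all memory types
    // VK_ERROR_VALIDATION_FAILED_EXT => corruption was detected (also asserts).
*/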
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type)
    {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();

            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());

            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector.
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode)
            {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}

void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty())
    {
        // First allocation: Mark it as next empty at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset)
        {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
    {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaBinaryFindSorted(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc,
            VmaSuballocationOffsetLess());
        if(it != suballocations1st.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
    {
        // Item from the middle of 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
        if(it != suballocations2nd.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
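/*
Editor's note: the heuristic above compacts only when the 1st vector holds more
than 32 items and null items make up at least 60% of it. Worked example: with
100 items of which 60 are null, 60*2 = 120 >= (100-60)*3 = 120, so compaction
runs; with 59 nulls, 118 < 123, so it does not.
*/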
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty())
    {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else
    {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
        {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            VmaVectorRemove(suballocations2nd, 0);
        }

        if(ShouldCompact1st())
        {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
            {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
                {
                    ++srcIndex;
                }
                if(dstIndex != srcIndex)
                {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty())
        {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
        {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
                {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}
VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}

VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}

void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
    {
        ++m_LevelCount;
    }

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}
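/*
Editor's note - a worked example of the level setup above. For a 256 MiB block
(already a power of 2, so m_UsableSize == 256 MiB):

    level 0: one 256 MiB node (the root)
    level 1: 128 MiB nodes
    level 2:  64 MiB nodes
    ...down to MIN_NODE_SIZE or MAX_LEVELS, whichever limit is hit first.

If the block size is not a power of 2, the tail beyond VmaPrevPow2(size) is
unusable by this algorithm and is reported as an unused range in statistics.
*/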
bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next)
        {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL)
            {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else
            {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists at higher levels are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}
VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        if(m_FreeList[level].front != VMA_NULL)
        {
            return LevelToNodeSize(level);
        }
    }
    return 0;
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    outInfo.blockCount = 1;
    outInfo.allocationCount = 0;
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.unusedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0)
    {
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
    }
}
void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0)
    {
        ++inoutStats.unusedRangeCount;
        // Not updating unusedRangeSizeMax with unusableSize because this space is not available for allocations.
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity: round both alignment and
    // size up whenever the allocation might be an image.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
    {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    /*
    Lost allocations are not supported in the buddy allocator.
    */
    return pAllocationRequest->itemsToMakeLostCount == 0;
}

uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    /*
    Lost allocations are not supported in the buddy allocator.
    */
    return 0;
}
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down, splitting free nodes.
    while(currLevel < targetLevel)
    {
        // currNode is already the first free node at currLevel.
        // Remove it from the list of free nodes at this level.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
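/*
Editor's note - an illustrative trace (not part of the original source) of the
split loop above. Allocating 10 MiB (target level has 16 MiB nodes) when the
best free node found is 64 MiB:

    64 MiB free node -> split into 2x 32 MiB (continue into the left child)
    32 MiB free node -> split into 2x 16 MiB
    16 MiB free node -> becomes TYPE_ALLOCATION for the request

Each split removes the parent from its free list and pushes both children to
the front of the next level's list, left child on top, so the next iteration
continues from m_FreeList[currLevel].front.
*/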
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }

    vma_delete(GetAllocationCallbacks(), node);
}
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev, next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
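/*
Editor's note - a worked example of the loop above with m_UsableSize = 256 MiB
and allocSize = 10 MiB: node sizes per level are 256, 128, 64, 32, 16, 8 MiB.
The loop descends while the allocation still fits in the next (smaller) level:
10 <= 128, 64, 32, 16 but not 8, so it stops at the level with 16 MiB nodes -
the smallest node size that can still hold 10 MiB.
*/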
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find node and level.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT)
    {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize)
        {
            node = node->split.leftChild;
        }
        else
        {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free nodes if possible.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
    {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += levelNodeSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            ++outInfo.allocationCount;
            outInfo.usedBytes += allocSize;
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);

            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else
    {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}
void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is the first node in the list.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is the last node in the list.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}
void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // Hitting this assert means some allocations were not freed before
    // destruction of this memory block - a memory leak on the caller's side.
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}

bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}
VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
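/*
Both bind functions take m_Mutex before calling into Vulkan because
vkBindBufferMemory/vkBindImageMemory and vkMapMemory require external
synchronization on the same VkDeviceMemory; multiple allocations suballocated
from this one block may be bound or mapped concurrently by different threads.
The offset passed to Vulkan is the allocation's offset inside the block plus
the caller-provided allocationLocalOffset.
*/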
static void VmaInitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}

VmaPool_T::~VmaPool_T()
{
}

void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}

#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}

VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}

bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_Algorithm == 0 || m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
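/*
Upper bound on retries of the "make other allocations lost" path in
AllocatePage() below: each attempt may race with other threads touching
allocations, so the loop is capped instead of retrying indefinitely.
*/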
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
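/*
Note that Allocate() is all-or-nothing for a batch: if any page fails, every
page allocated so far is freed and the output array is zeroed, so the caller
never sees a partially filled result. This backs the public
vmaAllocateMemoryPages() API. Hedged caller-side sketch (local variable names
are illustrative):

    VkMemoryRequirements memReq = ...;
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    VmaAllocation allocs[8];
    VkResult res = vmaAllocateMemoryPages(
        allocator, &memReq, &allocCreateInfo, 8, allocs, VMA_NULL);
    // On failure, allocs[] contains only null handles.
*/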
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // If linearAlgorithm is used, canMakeOtherLost is available only when used as ring buffer,
    // which in turn is available only when maxBlockCount = 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    /*
    Under certain conditions this whole section can be skipped for optimization, so
    we move on directly to trying to allocate with canMakeOtherLost. That is the case
    e.g. for custom pools with linear algorithm.
    */
    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                    createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                        createInfo.pUserData, suballocType, strategy, pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                        createInfo.pUserData, suballocType, strategy, pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
        }

        // 2. Try to create new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                    createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }

    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // Allocate from this block.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
                    (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    UpdateHasEmptyBlock();
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment,
                        size,
                        m_MemoryTypeIndex,
                        suballocType,
                        mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations must have been touched while we were here. Next try.
            }
            else
            {
                // Could not find place in any of the blocks - break outer loop.
                break;
            }
        }
        // Maximum number of tries exceeded - a very unlikely situation.
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
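/*
Summary of AllocatePage() above - allocation proceeds in up to three stages:
1. Try existing blocks (iteration order depends on the chosen strategy bit).
2. Create a new block. When the block size is not explicit, the first blocks
   are progressively smaller: with a preferred block size of 256 MiB the
   heuristic first tries 32 MiB, then 64, 128 and finally 256 MiB (up to
   NEW_BLOCK_SIZE_SHIFT_MAX halvings), so small applications don't commit a
   huge block up front. If VkDeviceMemory allocation itself fails, the size
   is halved again down to the requested size. (The sizes here are an
   illustrative example, not constants from the code.)
3. If allowed, make other allocations "lost" and reuse their space, retrying
   up to VMA_ALLOCATION_TRY_COUNT times.
*/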
void VmaBlockVector::Free(
    const VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already had an empty block. We don't want two, so delete this one.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // else: We now have one empty block - leave it as hysteresis.
        }
        // pBlock didn't become empty, but we have another empty block - find and free that one.
        // (This is optional, heuristics.)
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
            }
        }

        UpdateHasEmptyBlock();
        IncrementallySortBlocks();
    }

    // Destruction of a free block, deferred until this point, outside of the
    // mutex lock, for performance reasons.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
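/*
Empty-block policy in Free() above: the vector keeps at most one fully empty
block alive as hysteresis, so a workload that repeatedly allocates and frees
the last allocation does not create and destroy a whole VkDeviceMemory each
time. The spare block is given up immediately when the heap budget is already
exceeded or unless m_MinBlockCount pins it. Actual destruction happens after
m_Mutex is released, since freeing device memory can be slow.
*/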
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}

void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}
void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
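/*
IncrementallySortBlocks() performs a single bubble-sort pass that stops at
the first swap. One call does not fully sort m_Blocks, but calling it after
every free keeps the vector approximately ordered by ascending free space,
which is what the best-fit search in AllocatePage() relies on, at O(n) worst
case per call instead of a full sort.
*/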
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex,
        m_FrameInUseCount,
        m_BufferImageGranularity,
        size,
        alignment,
        isUpperAddress,
        suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // Allocate from pBlock.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
        (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        UpdateHasEmptyBlock();
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment,
            size,
            m_MemoryTypeIndex,
            suballocType,
            mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;
    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created. Create new block object for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
            memmove(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
    // Regardless of pDefragCtx->res.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
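/*
The flush/invalidate ranges above must be aligned to nonCoherentAtomSize, as
Vulkan requires for non-coherent memory. Worked example (illustrative
numbers): with nonCoherentAtomSize = 64, srcOffset = 200 and size = 100,

    memRange.offset = VmaAlignDown(200, 64) = 192
    memRange.size   = VmaAlignUp(100 + (200 - 192), 64) = VmaAlignUp(108, 64) = 128

clamped so that offset + size does not exceed the block size.
*/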
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind buffer for whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Save buffers to defrag context for later destruction.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}

void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                break;
            }
        }
    }
    UpdateHasEmptyBlock();
}
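/*
Unlike the CPU path, ApplyDefragmentationMovesGpu() only records
vkCmdCopyBuffer commands into the caller's command buffer; the data has not
moved yet when it returns. pDefragCtx->res is therefore set to VK_NOT_READY,
and the temporary whole-block buffers are destroyed later in
DefragmentationEnd(), after the caller has submitted and completed the
command buffer.
*/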
void VmaBlockVector::UpdateHasEmptyBlock()
{
    m_HasEmptyBlock = false;
    for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = true;
            break;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
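/*
For reference, the object written above has roughly this shape (illustrative
and abbreviated - the exact block contents come from the metadata printers):

    {
      "Name": "MyPool", "MemoryTypeIndex": 2,
      "BlockSize": 268435456,
      "BlockCount": { "Min": 1, "Max": 16, "Cur": 3 },
      "Blocks": { "0": { ... }, "1": { ... } }
    }
*/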
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: Heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            m_Mutex.LockWrite();
            pCtx->mutexLocked = true;
        }

        pCtx->Begin(overlappingMoveSupported);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
            VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
        pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, moves);
            }
        }
    }
}
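/*
Choice of defragmentation path above: CPU moves require the memory type to be
HOST_VISIBLE (memmove on mapped pointers), GPU moves require the type to be
enabled in GetGpuDefragmentationMemoryTypeBits() and corruption detection to
be off. When both are possible, DEVICE_LOCAL memory (or any memory on an
integrated GPU) prefers the GPU path, since reading device-local memory
through a mapped pointer is typically very slow.
*/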
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats)
{
    // Destroy buffers.
    for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
    {
        VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
        if(blockCtx.hBuffer)
        {
            (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
                m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
        }
    }

    if(pCtx->res >= VK_SUCCESS)
    {
        FreeEmptyBlocks(pStats);
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}

bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}
VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now as we are inside VmaBlockVector::m_Mutex, we can make a final check whether this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    size_t srcBlockMinIndex = 0;
    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET, // strategy (reconstructed from the elided original)
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move;
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);
                pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);

                allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If the allocation was not moved, it stays in m_Allocations for the next round.
        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else if(srcBlockIndex > 0)
        {
            --srcBlockIndex;
            srcAllocIndex = SIZE_MAX;
        }
        else
        {
            return VK_SUCCESS;
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];
        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }
        pBlockInfo->CalcHasNonMovableAllocations();
        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks by the main criterium, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // This is a choice based on research.
    const uint32_t roundCount = 2;

    // Execute defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
    }
    return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
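/*
MoveMakesSense() orders positions lexicographically by (blockIndex, offset):
a move is worthwhile only if it transports an allocation strictly "left", to
a lower block index or to a lower offset within the same block. This
guarantees every accepted move compacts the pool and the algorithm cannot
oscillate by moving the same allocation back and forth between rounds.
*/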
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}

VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most destination.

    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
    });

    // THE MAIN ALGORITHM

    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;

    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            // Try to place it in one of free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register remaining free space at the end of dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If destination and source place overlap, skip if it would move it
                        // by only < 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;
                        VmaDefragmentationMove move = {
                            srcOrigBlockIndex, dstOrigBlockIndex,
                            srcAllocOffset, dstAllocOffset,
                            srcAllocSize };
                        moves.push_back(move);
                    }
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, dstOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
            }
        }
    }

    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
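/*
The fast algorithm above is a single linear sweep with two cursors: a source
iterator that visits every suballocation, and a destination cursor
(dstBlockInfoIndex, dstOffset) that only moves forward through blocks sorted
from fullest to emptiest. Each allocation is either shifted down within its
own block (ChangeOffset), moved to an earlier block (ChangeBlockAllocation),
or skipped when an overlapping same-block move would gain less than 1/64 of
its size; skipped gaps are remembered in FreeSpaceDatabase so later, smaller
allocations can still fill them. It edits VmaBlockMetadata_Generic internals
directly, which is why Preprocess/PostprocessMetadata() temporarily strip and
then rebuild the free-suballocation bookkeeping.
*/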
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            //pMetadata->m_SumFreeSize is already set to blockSize.
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
    const VmaSuballocation& suballoc)
{
    // Insert before the first suballocation with a greater or equal offset.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end())
    {
        if(it->offset < suballoc.offset)
        {
            ++it;
        }
        else
        {
            break;
        }
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}

VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}
void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    /********************************
    HERE IS THE CHOICE OF DEFRAGMENTATION ALGORITHM.
    ********************************/

    /*
    Fast algorithm is supported only when certain criteria are met:
    - VMA_DEBUG_MARGIN is 0.
    - All allocations in this block vector are moveable.
    - There is no possibility of image/buffer granularity conflict.
    */
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible())
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
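/*
Algorithm selection rationale: the fast algorithm rewrites block metadata
wholesale, so it is only safe when every allocation in the block vector is
known to the defragmentation context (allAllocations), no debug margins exist
between allocations, and no buffer/image granularity conflicts are possible.
Otherwise the slower, move-by-move generic algorithm is used.
*/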
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}

VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with algorithm other than default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentators. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // DedicatedAlloc cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // Lost allocation cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with algorithm other than default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}
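/*
Hedged caller-side sketch of how these contexts are driven through the public
API (variable names illustrative; see vmaDefragmentationBegin/End in the
interface section above):

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = allocCount;
    defragInfo.pAllocations = allocs;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext defragCtx;
    vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
    // If a command buffer was passed in defragInfo, submit it and wait here.
    vmaDefragmentationEnd(allocator, defragCtx);
*/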
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_Freq(INT64_MAX),
    m_StartCounter(INT64_MAX)
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,8");

    return VK_SUCCESS;
}

VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}
void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}
void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    PrintPointerList(info.poolCount, info.pPools);
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
    Flush();
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}
void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
    Flush();
}

VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            sprintf_s(m_PtrStr, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
    outParams.threadId = GetCurrentThreadId();

    LARGE_INTEGER counter;
    QueryPerformanceCounter(&counter);
    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
}

void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}
void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
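/*
Recording is requested by the user through VmaAllocatorCreateInfo::pRecordSettings.
A minimal sketch of enabling it (the file path is illustrative):

    VmaRecordSettings recordSettings = {};
    recordSettings.flags = VMA_RECORD_FLUSH_AFTER_CALL_BIT;
    recordSettings.pFilePath = "capture.csv";

    VmaAllocatorCreateInfo allocatorInfo = {};
    // ... fill physicalDevice, device, instance etc. ...
    allocatorInfo.pRecordSettings = &recordSettings;
*/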
////////////////////////////////////////////////////////////////////////////////
// VmaAllocationObjectAllocator

VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

VmaAllocation VmaAllocationObjectAllocator::Allocate()
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc();
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
////////////////////////////////////////////////////////////////////////////////
// VmaAllocator_T

VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
    m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        // In Vulkan 1.1 the functionality of these extensions is part of the core API.
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // The margin must be a multiple of sizeof(uint32_t) because it is filled with a 32-bit magic value.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));
    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0,
            SIZE_MAX,
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call m_pBlockVectors[memTypeIndex]->CreateMinBlocks here, because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}
VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        // Fetch the core Vulkan 1.1 entry points under their non-KHR names.
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2");
        m_VulkanFunctions.vkBindBufferMemory2KHR =
            (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2");
        m_VulkanFunctions.vkBindImageMemory2KHR =
            (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2");
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
            (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    }
#endif
#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        m_VulkanFunctions.vkBindBufferMemory2KHR =
            (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2KHR");
        m_VulkanFunctions.vkBindImageMemory2KHR =
            (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2KHR");
    }
#endif // #if VMA_BIND_MEMORY2
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget && m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
            (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif // #if VMA_MEMORY_BUDGET
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    // User-supplied pointers take precedence over the statically linked ones.
    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
        VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif
#if VMA_MEMORY_BUDGET
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif
    }

#undef VMA_COPY_IF_NOT_NULL
    // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
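/*
When VMA_STATIC_VULKAN_FUNCTIONS is 0 (e.g. when the application loads Vulkan
dynamically through a loader such as volk), the user must pass the entry
points explicitly. A minimal sketch, assuming pointers already fetched by the
application (only two members shown; all of them must be filled):

    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkAllocateMemory = myLoadedVkAllocateMemory; // hypothetical app-side pointer
    vulkanFunctions.vkFreeMemory = myLoadedVkFreeMemory;         // hypothetical app-side pointer
    // ... and so on for every member ...
    allocatorInfo.pVulkanFunctions = &vulkanFunctions;
*/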
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
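/*
Worked example of the heuristic above: a 256 MiB heap (typical for a BAR /
HOST_VISIBLE | DEVICE_LOCAL heap) counts as "small" under the default
VMA_SMALL_HEAP_MAX_SIZE of 1 GiB, so its preferred block size becomes
256 MiB / 8 = 32 MiB. A 16 GiB heap instead uses m_PreferredLargeHeapBlockSize
(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE, 256 MiB, unless overridden at creation).
*/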
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If the memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristic: allocate dedicated memory if the requested size is greater than half of the preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        else
        {
            return AllocateDedicatedMemory(
                size,
                suballocType,
                memTypeIndex,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
                finalCreateInfo.pUserData,
                dedicatedBuffer,
                dedicatedImage,
                allocationCount,
                pAllocations);
        }
    }
    else
    {
        VkResult res = blockVector->Allocate(
            m_CurrentFrameIndex.load(),
            size,
            alignment,
            finalCreateInfo,
            suballocType,
            allocationCount,
            pAllocations);
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Block-vector allocation failed. Try dedicated memory as a fallback.
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        else
        {
            res = AllocateDedicatedMemory(
                size,
                suballocType,
                memTypeIndex,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
                finalCreateInfo.pUserData,
                dedicatedBuffer,
                dedicatedImage,
                allocationCount,
                pAllocations);
            if(res == VK_SUCCESS)
            {
                VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
                return VK_SUCCESS;
            }
            else
            {
                // Everything failed: return the error code.
                VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
                return res;
            }
        }
    }
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size,
            suballocType,
            memTypeIndex,
            allocInfo,
            map,
            isUserDataString,
            pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register the new allocations in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all already-created allocations.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            // No need to unmap here; the Vulkan spec allows freeing memory that is still mapped.
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            currAlloc->Dtor();
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate();
    (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If the memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfoForPool,
            suballocType,
            allocationCount,
            pAllocations);
    }
    else
    {
        // Bit mask of memory types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer,
                dedicatedImage,
                createInfo,
                memTypeIndex,
                suballocType,
                allocationCount,
                pAllocations);
            // Succeeded on the first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed. Try other compatible memory types.
            for(;;)
            {
                // Remove the old memTypeIndex from the list of possibilities.
                memoryTypeBits &= ~(1u << memTypeIndex);
                // Find an alternative memTypeIndex.
                res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                if(res == VK_SUCCESS)
                {
                    alignmentForMemType = VMA_MAX(
                        vkMemReq.alignment,
                        GetMemoryTypeMinAlignment(memTypeIndex));

                    res = AllocateMemoryOfType(
                        vkMemReq.size,
                        alignmentForMemType,
                        requiresDedicatedAllocation || prefersDedicatedAllocation,
                        dedicatedBuffer,
                        dedicatedImage,
                        createInfo,
                        memTypeIndex,
                        suballocType,
                        allocationCount,
                        pAllocations);
                    // Allocation from this alternative memory type succeeded.
                    if(res == VK_SUCCESS)
                    {
                        return res;
                    }
                    // else: allocation from this memory type also failed. Try the next one on the next iteration.
                }
                else
                {
                    // No other matching memory type index could be found.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
        else
        {
            return res;
        }
    }
}
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            // Do this regardless of whether the allocation is lost.
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            allocation->Dtor();
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}
VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and so it does nothing. It's left for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // Have to take MIN with heap size because explicit HeapSizeLimit is included in it.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
        }
    }
}
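/*
Worked example of the usage estimate above, with illustrative numbers: if the
last budget fetch reported m_VulkanUsage = 100 MiB while VMA owned
m_BlockBytesAtBudgetFetch = 60 MiB of blocks, and VMA has since grown to
blockBytes = 80 MiB, the estimated current usage is 100 + 80 - 60 = 120 MiB.
*/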
static const uint32_t VMA_VENDOR_ID_AMD = 4098;

VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}

VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        /*
        Warning: This is a carefully designed algorithm.
        Do not modify unless you really know what you're doing :)
        */
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}

void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}
void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate();
    (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
        }
    }
    else
    {
        // Allocation failed: roll back the accounting.
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}
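/*
Note on the loop above: when a heap size limit is active, the allocation size
is reserved in m_Budget.m_BlockBytes[heapIndex] with a compare-exchange loop
before calling vkAllocateMemory, so two threads cannot both pass the limit
check and jointly overshoot the heap. If vkAllocateMemory then fails, the
reservation is subtracted again.
*/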
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}

VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}
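/*
Note on both Bind* helpers above: the *2KHR entry points are needed only when
the caller passes a pNext chain (e.g. VkBindBufferMemoryDeviceGroupInfo);
plain binds keep going through the core 1.0 vkBindBufferMemory /
vkBindImageMemory. If a chain is requested but neither Vulkan 1.1 nor
VK_KHR_bind_memory2 is available, VK_ERROR_EXTENSION_NOT_PRESENT is returned.
*/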
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
void VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize allocationSize = hAllocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;

        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
        memRange.memory = hAllocation->GetMemory();

        switch(hAllocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                memRange.size = allocationSize - memRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
                    allocationSize - memRange.offset);
            }
            break;

        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            // 1. Still within this allocation.
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                size = allocationSize - offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
            }
            memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);

            // 2. Adjust to whole block.
            const VkDeviceSize allocationOffset = hAllocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
            memRange.offset += allocationOffset;
            memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);

            break;
        }

        default:
            VMA_ASSERT(0);
        }

        switch(op)
        {
        case VMA_CACHE_FLUSH:
            (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
}
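/*
Worked example of the rounding above, with illustrative numbers: for
nonCoherentAtomSize = 64, a flush of offset = 100, size = 20 becomes
memRange.offset = VmaAlignDown(100, 64) = 64 and
memRange.size = VmaAlignUp(20 + (100 - 64), 64) = VmaAlignUp(56, 64) = 64,
then clamped so that it does not run past the end of the allocation or block.
*/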
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    // No need to unmap here; the Vulkan spec allows freeing memory that is still mapped.

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create buffer.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // Query for supported memory types.
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // Destroy buffer.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}
#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    memProps.pNext = &budgetProps;

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET
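/*
Refresh cadence: GetBudget above treats the fetched numbers as fresh for up to
30 allocate/free operations (m_OperationsSinceBudgetFetch), and
SetCurrentFrameIndex also re-fetches once per frame, so
vkGetPhysicalDeviceMemoryProperties2KHR is called at a bounded rate rather
than on every budget query.
*/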
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
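/*
The UINT32_MAX sentinel makes the computation above lazy: the bits are
calculated on first use and cached. A race between two threads is benign,
since both compute the same value and store it.
*/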
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

//////////////////////////////////////////////////////////////////////////////////
// Public interface

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}
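
// Typical allocator creation from application code (illustrative sketch, not
// part of the library; physicalDevice/device/instance are hypothetical handles
// the application already owns):
//
//     VmaAllocatorCreateInfo allocatorInfo = {};
//     allocatorInfo.physicalDevice = physicalDevice;
//     allocatorInfo.device = device;
//     allocatorInfo.instance = instance;
//
//     VmaAllocator allocator;
//     VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);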

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        // Copy the callbacks out first: the allocator object itself is freed through them.
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}
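
// Applications that use lost allocations typically bump the frame index once
// per rendered frame (sketch; frameIndex is an application-side counter):
//
//     vmaSetCurrentFrameIndex(allocator, ++frameIndex);
//
// The index drives the last-use bookkeeping that decides when an allocation
// created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT may be retired.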

VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
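
// Illustrative budget query (sketch): the caller provides one VmaBudget per
// memory heap; VK_MAX_MEMORY_HEAPS is always a safe array size.
//
//     VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
//     vmaGetBudget(allocator, budgets);
//     // Comparing budgets[heapIndex].usage against budgets[heapIndex].budget
//     // tells how close heap heapIndex is to the OS-imposed limit.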

#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
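
// Illustrative round trip through the stats-string API (sketch): the returned
// string is JSON and must be released through the same allocator.
//
//     char* statsString = nullptr;
//     vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = detailed map
//     // ... dump statsString to a log or file ...
//     vmaFreeStatsString(allocator, statsString);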

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Calculate cost as number of bits from preferredFlags not present in this
                // memory type, plus number of notPreferredFlags that are present.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
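
// Example cost-based lookup (sketch): pick a memory type for staging data,
// starting from requirements the application already queried (memReq here is
// a hypothetical result of vkGetBufferMemoryRequirements):
//
//     VmaAllocationCreateInfo allocCreateInfo = {};
//     allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
//
//     uint32_t memTypeIndex;
//     VkResult res = vmaFindMemoryTypeIndex(
//         allocator, memReq.memoryTypeBits, &allocCreateInfo, &memTypeIndex);
//
// A type containing all requiredFlags is mandatory; among candidates, the one
// missing the fewest preferredFlags (and matching the fewest notPreferredFlags)
// wins, and a zero-cost match returns immediately.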

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    // Create a temporary buffer only to query its memory requirements, then destroy it.
    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}
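
// The "ForBufferInfo" variant spares the caller the temporary-buffer dance
// performed above. Sketch (bufCreateInfo is a filled VkBufferCreateInfo,
// allocCreateInfo as in the previous example):
//
//     uint32_t memTypeIndex;
//     VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
//         allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);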
16705 const VkImageCreateInfo* pImageCreateInfo,
16707 uint32_t* pMemoryTypeIndex)
16709 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16710 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
16711 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16712 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16714 const VkDevice hDev = allocator->m_hDevice;
16715 VkImage hImage = VK_NULL_HANDLE;
16716 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
16717 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
16718 if(res == VK_SUCCESS)
16720 VkMemoryRequirements memReq = {};
16721 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
16722 hDev, hImage, &memReq);
16726 memReq.memoryTypeBits,
16727 pAllocationCreateInfo,
16730 allocator->GetVulkanFunctions().vkDestroyImage(
16731 hDev, hImage, allocator->GetAllocationCallbacks());

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}
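
// Illustrative custom pool setup (sketch): reserve up to two 128 MiB blocks of
// one specific memory type, e.g. one found with vmaFindMemoryTypeIndexForBufferInfo:
//
//     VmaPoolCreateInfo poolCreateInfo = {};
//     poolCreateInfo.memoryTypeIndex = memTypeIndex;
//     poolCreateInfo.blockSize = 128ull * 1024 * 1024;
//     poolCreateInfo.maxBlockCount = 2;
//
//     VmaPool pool;
//     VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);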

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
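
// Sketch of the split create/allocate/bind path this function serves; most
// applications can use vmaCreateBuffer() instead, which wraps all three steps
// (device/bufCreateInfo are application-side, as above):
//
//     VkBuffer buf;
//     vkCreateBuffer(device, &bufCreateInfo, nullptr, &buf);
//
//     VmaAllocationCreateInfo allocCreateInfo = {};
//     allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//
//     VmaAllocation alloc;
//     vmaAllocateMemoryForBuffer(allocator, buf, &allocCreateInfo, &alloc, nullptr);
//     vmaBindBufferMemory(allocator, alloc, buf);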

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
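
// Typical map/copy/unmap sequence (sketch; srcData/dataSize are application-side).
// Map() is reference-counted, so nested vmaMapMemory()/vmaUnmapMemory() pairs on
// the same allocation are legal:
//
//     void* mappedData;
//     if(vmaMapMemory(allocator, alloc, &mappedData) == VK_SUCCESS)
//     {
//         memcpy(mappedData, srcData, (size_t)dataSize);
//         vmaUnmapMemory(allocator, alloc);
//         // Needed only if the memory type lacks HOST_COHERENT:
//         vmaFlushAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
//     }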

VMA_CALL_PRE void VMA_CALL_POST vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
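
// Corruption detection requires compiling the implementation with a nonzero
// margin and the detection switch, e.g. (sketch, before the include in one .cpp):
//
//     #define VMA_DEBUG_MARGIN 16
//     #define VMA_DEBUG_DETECT_CORRUPTION 1
//     #define VMA_IMPLEMENTATION
//     #include "vk_mem_alloc.h"
//
// A later vmaCheckCorruption(allocator, UINT32_MAX) then validates the magic
// values written into the margins of all host-visible memory types.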

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using the new one.

    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // info2.flags and the GPU-related members are deliberately left zero:
    // GPU defragmentation is not supported through this deprecated function.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}
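
// Minimal CPU-side defragmentation round (sketch): collect candidate
// allocations, run begin/end, then recreate buffers/images whose allocations
// were moved (allocs/allocCount are application-side):
//
//     VmaDefragmentationInfo2 defragInfo = {};
//     defragInfo.allocationCount = (uint32_t)allocCount;
//     defragInfo.pAllocations = allocs;
//     defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
//     defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
//
//     VmaDefragmentationContext defragCtx;
//     vmaDefragmentationBegin(allocator, &defragInfo, nullptr, &defragCtx);
//     vmaDefragmentationEnd(allocator, defragCtx);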

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in the alignment reported by memory requirements.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
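
// Canonical buffer creation through the allocator (sketch): one call creates
// the buffer, allocates memory and binds them together.
//
//     VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
//     bufCreateInfo.size = 65536;
//     bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
//
//     VmaAllocationCreateInfo allocCreateInfo = {};
//     allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//
//     VkBuffer buf;
//     VmaAllocation alloc;
//     VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
//         &buf, &alloc, nullptr);
//     // ... use buf ...
//     vmaDestroyBuffer(allocator, buf, alloc);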

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
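
// Image creation mirrors vmaCreateBuffer (sketch): tiling decides whether the
// allocation is treated as linear or optimal for granularity-conflict checks.
//
//     VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
//     imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
//     imgCreateInfo.extent = { 1024, 1024, 1 };
//     imgCreateInfo.mipLevels = 1;
//     imgCreateInfo.arrayLayers = 1;
//     imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
//     imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
//     imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
//     imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
//
//     VmaAllocationCreateInfo allocCreateInfo = {};
//     allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//
//     VkImage img;
//     VmaAllocation alloc;
//     VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo,
//         &img, &alloc, nullptr);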

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION