#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif

// Define this macro to declare maximum supported Vulkan version in format AAABBBCCC,
// e.g. 1001000 = Vulkan 1.1, 1000000 = Vulkan 1.0.
#if !defined(VMA_VULKAN_VERSION)
    #if defined(VK_VERSION_1_1)
        #define VMA_VULKAN_VERSION 1001000
    #else
        #define VMA_VULKAN_VERSION 1000000
    #endif
#endif

#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif

// Define these macros to decorate all public functions with additional code,
// before and after returned type, appropriately, e.g. to export/import them from a DLL.
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif
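// Illustrative (not part of the library): the configuration macros above are
// meant to be overridden by the user before including this header, e.g.:
//
//   #define VMA_VULKAN_VERSION 1000000   // force Vulkan 1.0 code paths
//   #define VMA_DEDICATED_ALLOCATION 0   // disable VK_KHR_dedicated_allocation usage
//   #include "vk_mem_alloc.h"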
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
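// Illustrative (a sketch): when the application supplies Vulkan entry points
// itself, the extension functions above can be fetched and passed in
// VmaAllocatorCreateInfo::pVulkanFunctions, e.g.:
//
//   VmaVulkanFunctions funcs = {};
//   funcs.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR)
//       vkGetDeviceProcAddr(device, "vkGetBufferMemoryRequirements2KHR");
//   // ... assign the remaining members similarly.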
/// PhysicalDeviceProperties are fetched from physicalDevice by the allocator.
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

/// PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator.
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

/// Given a memory type index, returns its VkMemoryPropertyFlags.
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

/// Sets index of the current frame.
VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);
/// Define this macro to 0 to disable functions vmaBuildStatsString and vmaFreeStatsString.
#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as a null-terminated string in JSON format.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);
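// Illustrative usage of the lookup above (a sketch; assumes an initialized
// VmaAllocator `allocator`):
//
//   VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
//   bufInfo.size = 65536;
//   bufInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
//   VmaAllocationCreateInfo allocInfo = {};
//   allocInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
//   uint32_t memTypeIndex;
//   VkResult res = vmaFindMemoryTypeIndexForBufferInfo(allocator, &bufInfo, &allocInfo, &memTypeIndex);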
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

/// Retrieves name of a custom pool.
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
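// Illustrative usage (a sketch; error handling omitted):
//
//   VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
//   bufferInfo.size = 65536;
//   bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
//   VmaAllocationCreateInfo allocCreateInfo = {};
//   allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//   VkBuffer buffer;
//   VmaAllocation allocation;
//   vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo, &buffer, &allocation, nullptr);
//   // ...
//   vmaDestroyBuffer(allocator, buffer, allocation);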
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdlib>
#include <cstring>

/*
Define this macro to 1 to make the library fetch pointers to Vulkan functions
internally, like: vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
*/
#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
#define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif
// Define this macro to 1 to make the library use STL containers instead of its own implementation.
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus
    // Otherwise it's always 199711L, despite shared_mutex works since Visual Studio 2015 Update 2.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif

#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <cassert> // for assert
#include <algorithm> // for min, max
#include <mutex>
#define VMA_NULL   nullptr

#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib> // for memalign()
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef _DEBUG
        #define VMA_ASSERT(expr) assert(expr)
    #else
        #define VMA_ASSERT(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef _DEBUG
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)
    #endif
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif
// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
#include <cstdio> // for snprintf
static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        // Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: use full (exclusive) mutex for reads as well as writes.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
/*
If providing your own implementation, you need to implement a subset of std::atomic.
*/
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation will have its own memory block. Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes.
    // Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes.
    // Set nonzero for debugging purposes only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define this macro to 1 to automatically fill new allocations and destroyed
    // allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to
    // enable writing magic values around allocations and validating them on free,
    // so out-of-bounds writes are detected.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Set this to 1 for debugging purposes only, to enable single mutex protecting
    // all entry calls to the library. Can be useful for debugging multithreading issues.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
    // Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif

#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

/*******************************************************************************
END OF CONFIGURATION
*/

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v), using a branch-free parallel bit count.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >>  2) & 0x33333333) + (c & 0x33333333);
    c = ((c >>  4) + c) & 0x0F0F0F0F;
    c = ((c >>  8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
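// For example (illustrative): VmaCountBitsSet(0x2C) == 3, since 0x2C = 0b101100.
// Handy when working with VkMemoryRequirements::memoryTypeBits masks.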
// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}

// Returns true if given number is a power of 2. For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}

// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}

static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}

#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
/*
Returns true if two memory blocks occupy very close bytes of the same memory page,
so they must be kept together during defragmentation.
Algorithm is based on the Vulkan specification, chapter "Resource Memory Association",
paragraph "Buffer-Image Granularity".
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
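// Worked example (illustrative): with pageSize = 65536, a resource ending at
// byte 65535 and a resource starting at byte 65536 fall on different pages, so
// the function returns false and no granularity conflict is possible.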
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};

/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
or linear image and another one is optimal image. If type is unknown, behave
conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#else
    // no-op
#endif
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}

// Fills structure with parameters of an example buffer to be used for transfers
// during GPU memory defragmentation.
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif
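// Illustrative use of the RAII locks above (a sketch; m_Mutex and m_UseMutex
// are hypothetical members of some class using them):
//
//   {
//       VmaMutexLockRead lock(m_Mutex, m_UseMutex); // shared lock held for this scope
//       // ... read shared state ...
//   } // automatically unlocked here, even on early return or exception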
// Minimum size of a free suballocation to register it in the collection of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;

/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection or place where
new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}

template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
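// Example (illustrative): searching a sorted array of integers:
//
//   const int arr[] = { 1, 3, 3, 7 };
//   struct Less { bool operator()(int a, int b) const { return a < b; } };
//   const int* p = VmaBinaryFindFirstNotLess(arr, arr + 4, 3, Less());
//   // p points at the first 3; searching for 4 would yield the position of 7.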
// Returns true if all pointers in the array are not-null and unique.
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}

////////////////////////////////////////////////////////////////////////////////
// Memory allocation

static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)
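// Illustrative usage of the helpers above (a sketch; MyType is hypothetical):
//
//   MyType* p = vma_new(pAllocationCallbacks, MyType)(); // allocate + placement-construct
//   vma_delete(pAllocationCallbacks, p);                 // destroy + free (defined below)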
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}

static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}

static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
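// Example (illustrative): VmaStlAllocator lets standard containers draw their
// storage from the user-provided VkAllocationCallbacks:
//
//   VmaStlAllocator<int> alloc(pCallbacks);
//   std::vector<int, VmaStlAllocator<int> > v(alloc);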
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
    // value is unused.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            // Grow by 1.5x (at least to 8) to keep push_back amortized O(1).
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
////////////////////////////////////////////////////////////////////////////////
// class VmaPoolAllocator

/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            return result;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    return result;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    // Each new block is 1.5x the capacity of the previous one.
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
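// Illustrative use of VmaPoolAllocator (a sketch; MyNode is hypothetical):
//
//   VmaPoolAllocator<MyNode> pool(pAllocationCallbacks, 32); // first block holds 32 items
//   MyNode* n = pool.Alloc(); // pops a slot from a block's in-place free list
//   pool.Free(n);             // pushes the slot back on its block's free list
//
// Free slots store the index of the next free slot in the same bytes that
// otherwise hold the object, so bookkeeping costs no extra memory.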
////////////////////////////////////////////////////////////////////////////////
// class VmaList

#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};

template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}
template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
////////////////////////////////////////////////////////////////////////////////
// class VmaMap

#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
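// Design note (not from the original source): VmaMap keeps its pairs in a
// vector sorted by key, so find() is a binary search (O(log n)) while
// insert()/erase() shift elements (O(n)) - a reasonable trade-off for the
// small maps used internally here.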
////////////////////////////////////////////////////////////////////////////////

class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };

struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    // This struct is allocated using VmaPoolAllocator, so constructor and
    // destructor are replaced by explicit Ctor/Dtor calls.
    void Ctor(uint32_t currentFrameIndex, bool userDataString)
    {
        m_Alignment = 1;
        m_Size = 0;
        m_MemoryTypeIndex = 0;
        m_pUserData = VMA_NULL;
        m_LastUseFrameIndex = currentFrameIndex;
        m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
        m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
        m_MapCount = 0;
        m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;

#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    void Dtor()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }
    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }
    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }

    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif

    void FreeUserDataString(VmaAllocator hAllocator);
};
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as
allocated memory block or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparator for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

/*
Parameters of planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
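// Worked example (illustrative): a request that must make 2 existing
// allocations lost, totaling sumItemSize = 1000 bytes, has
// CalcCost() == 1000 + 2 * 1048576, so requests that destroy fewer existing
// allocations are strongly preferred when candidates are compared.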
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    // Shouldn't modify blockCount.
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags.
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request must already be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};

#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    ////////////////////////////////////////////////////////////////////////////////
    // For defragmentation

    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    /*
    There are two suballocation vectors, used in ping-pong way.
    The one with index m_1stVectorIndex is called 1st.
    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
    2nd can be non-empty only when 1st is not empty.
    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
    */
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        /*
        Suballocations in 2nd vector are created later than the ones in 1st, but they
        all have smaller offset.
        */
        SECOND_VECTOR_RING_BUFFER,
        /*
        Suballocations in 2nd vector are upper side of double stack.
        They all have offsets higher than those in 1st vector.
        Top of this stack means smaller offsets, but higher indices in this vector.
        */
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
/*
Buddy allocator:

- GetSize() is the original size of allocated memory block.
- m_UsableSize is this size aligned down to a power of two.
  All allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them.
  It is reported as separate, unused range, not available for allocations.

Node at level 0 has size = m_UsableSize.
Each next level contains nodes with size 2 times smaller than current level.
m_LevelCount is the maximum number of levels to use in the current object.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;

    // Doubly linked list of free nodes, one list per level.
    struct {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];

    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // Alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
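// Worked example (illustrative): with m_UsableSize = 256 MiB, level 0 nodes are
// 256 MiB, level 1 nodes 128 MiB, level 2 nodes 64 MiB, and so on
// (LevelToNodeSize(level) == m_UsableSize >> level). A 48 MiB allocation is
// placed at the deepest level whose node size still fits it - here level 2
// (64 MiB) - and the 16 MiB difference is internal fragmentation.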
/*
Represents a single block of device memory (VkDeviceMemory) with all the
data about its regions (aka suballocations, VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    VkResult CheckCorruption(VmaAllocator hAllocator);

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it's not used by multiple threads simultaneously,
    e.g. vkMapMemory, vkBindBufferMemory. Also protects m_MapCount, m_pMappedData.
    Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
};

class VmaDefragmentationAlgorithm;
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats);

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    bool m_HasEmptyBlock;
    // Incrementally sorted by sum of free size, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    void UpdateHasEmptyBlock();
};
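// VmaBlockVector is the per-memory-type heart of the allocator: it owns all
// VmaDeviceMemoryBlock objects of one memory type (for a default or custom
// pool), creates blocks on demand between m_MinBlockCount and m_MaxBlockCount,
// and keeps m_Blocks incrementally sorted by free size via
// IncrementallySortBlocks() so allocation can find a suitable block quickly.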
struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo,
        VkDeviceSize preferredBlockSize);

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
#endif // #if VMA_STATS_STRING_ENABLED

private:
    uint32_t m_Id;
    char* m_Name;
};
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
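// Two concrete strategies implement this interface below: _Generic handles an
// arbitrary subset of movable allocations over multiple rounds, while _Fast
// appears to assume it may move everything (note its AddAllocation only counts)
// and compacts block contents in a single linear pass.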
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    // Leave this structure for the remaining empty space.
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    // This structure becomes invalid.
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }

                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };
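    // Worked example for Fetch() above (illustrative numbers only): given a
    // stored range { offset = 13, size = 100 } and a request with
    // alignment = 8, size = 40, dstOffset = VmaAlignUp(13, 8) = 16 and
    // freeSpaceAfter = (13 + 100) - (16 + 40) = 57. Since 57 is still big
    // enough to register, the entry shrinks to { offset = 56, size = 57 }
    // instead of being invalidated.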
    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    uint32_t flags;
    VkBuffer hBuffer;
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VkResult res;
    bool mutexLocked;
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;

    VmaBlockVectorDefragmentationContext(
        VmaAllocator hAllocator,
        VmaPool hCustomPool, // Optional.
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

    void Begin(bool overlappingMoveSupported);

private:
    const VmaAllocator m_hAllocator;
    // Null if not from custom pool.
    const VmaPool m_hCustomPool;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    // Used between constructor and Begin.
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        VmaAllocator hAllocator,
        uint32_t currFrameIndex,
        uint32_t flags,
        VmaDefragmentationStats* pStats);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);

private:
    const VmaAllocator m_hAllocator;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    VmaDefragmentationStats* const m_pStats;
    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    VmaRecorder();
    VkResult Init(const VmaRecordSettings& settings, bool useMutex);
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled);
    ~VmaRecorder();

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationContext ctx);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        VmaDefragmentationContext ctx);
    void RecordSetPoolName(uint32_t frameIndex,
        VmaPool pool,
        const char* name);

private:
    struct CallParams
    {
        uint32_t threadId;
        double time;
    };

    class UserDataString
    {
    public:
        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
        const char* GetString() const { return m_Str; }

    private:
        char m_PtrStr[17];
        const char* m_Str;
    };

    bool m_UseMutex;
    VmaRecordFlags m_Flags;
    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
};

#endif // #if VMA_RECORDING_ENABLED
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

private:
    VMA_MUTEX m_Mutex;
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
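// Sketch of how these fields presumably combine (an assumption based on their
// names, not code from the library): between fetches of VK_EXT_memory_budget
// data, current heap usage can be estimated from the last fetched value plus
// the growth of m_BlockBytes since that fetch.
#if 0
uint64_t EstimateHeapUsage(const VmaCurrentBudgetData& b, uint32_t heapIndex) // hypothetical
{
    const uint64_t blockBytes = b.m_BlockBytes[heapIndex];
    const uint64_t atFetch = b.m_BlockBytesAtBudgetFetch[heapIndex];
    return blockBytes >= atFetch
        ? b.m_VulkanUsage[heapIndex] + (blockBytes - atFetch)
        : b.m_VulkanUsage[heapIndex]; // usage never estimated below the fetched baseline
}
#endif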
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation;
    bool m_UseKhrBindMemory2;
    bool m_UseExtMemoryBudget;
    VkDevice m_hDevice;
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;

    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap.
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }
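    // Example of the rule above with illustrative numbers: if
    // nonCoherentAtomSize is 64, a HOST_VISIBLE but non-HOST_COHERENT memory
    // type forces at least 64-byte alignment, because
    // vkFlushMappedMemoryRanges and vkInvalidateMappedMemoryRanges operate on
    // whole atoms of that size.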
    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationStats* pStats,
        VmaDefragmentationContext* pContext);
    VkResult DefragmentationEnd(VmaDefragmentationContext context);

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR.
    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkBuffer buffer,
        const void* pNext);
    // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR.
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkImage image,
        const void* pNext);

    VkResult BindBufferMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

    void FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    uint32_t GetGpuDefragmentationMemoryTypeBits();

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
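// The helpers above are a hand-rolled new/delete built on the user-provided
// VkAllocationCallbacks: raw aligned storage from VmaMalloc, explicit
// destructor calls, then VmaFree. A hypothetical usage sketch (call sites
// would pair VmaAllocate with placement-new):
#if 0
VmaStringBuilder* sb =
    new(VmaAllocate<VmaStringBuilder>(hAllocator)) VmaStringBuilder(hAllocator);
vma_delete(hAllocator, sb); // runs ~VmaStringBuilder(), then frees via the callbacks
#endif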
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char* p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char* p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}
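// The loops above format numbers by writing digits right-to-left into a stack
// buffer, so no reversal pass is needed. E.g. for num = 407:
// *--p = '7'; *--p = '0'; *--p = '4'; and p then points at "407".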
void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";
VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        const char ch = pStr[i];
        if(ch == '"' || ch == '\\')
        {
            m_SB.Add('\\');
            m_SB.Add(ch);
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add('\\'); m_SB.Add('b'); break;
        case '\f': m_SB.Add('\\'); m_SB.Add('f'); break;
        case '\n': m_SB.Add('\\'); m_SB.Add('n'); break;
        case '\r': m_SB.Add('\\'); m_SB.Add('r'); break;
        case '\t': m_SB.Add('\\'); m_SB.Add('t'); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
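// Hypothetical usage of the writer above (a sketch, not library code;
// hAllocator is assumed to be a valid VmaAllocator):
#if 0
VmaStringBuilder sb(hAllocator);
{
    VmaJsonWriter json(hAllocator->GetAllocationCallbacks(), sb);
    json.BeginObject();
    json.WriteString("Size"); // keys must be strings - see the assert in BeginValue()
    json.WriteNumber(256u);   // BeginValue() inserts the ": " separator automatically
    json.EndObject();
}
// sb now contains: {\n  "Size": 256\n}
#endif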
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
        {
            ++mapRefCount;
        }
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting the atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                return true;
            }
        }
    }
}
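// The loop above is a compare-and-swap retry: CompareExchangeLastUseFrameIndex
// presumably reloads the current value into localLastUseFrameIndex on failure
// (standard compare_exchange semantics), so the frame-index conditions are
// re-evaluated until the CAS wins or the allocation turns out to be in use.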
#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}

void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}

bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of the next suballocation, calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number and total size of free suballocations.
    uint32_t calculatedFreeCount = 0;
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations registered in m_FreeSuballocationsBySize.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid: they should have been merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}
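// Invariants enforced above, in short: suballocations tile the block exactly
// (offsets contiguous, summing to GetSize()), no two free ranges are adjacent
// (they must have been merged), and every free range of at least
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes appears in
// m_FreeSuballocationsBySize exactly once, sorted by size ascending.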
VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}

void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;

    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // Early return: not enough total free space in this block.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(currentFrameIndex, frameInUseCount,
                    bufferImageGranularity, allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index], false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex, frameInUseCount,
                    bufferImageGranularity, allocSize, allocAlignment, allocType,
                    it, false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search from the biggest suballocations downward.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(currentFrameIndex, frameInUseCount,
                    bufferImageGranularity, allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index], false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force: search all suballocations, also non-free ones whose
        // allocations can be made lost, and pick the cheapest candidate.
        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(currentFrameIndex, frameInUseCount,
                    bufferImageGranularity, allocSize, allocAlignment, allocType,
                    suballocIt, canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
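// Strategy recap for the search above: BEST_FIT binary-searches the
// size-sorted free list and takes the smallest range that fits; the internal
// MIN_OFFSET strategy walks suballocations in address order; the remaining
// strategies scan from the largest free range downward. Only when
// canMakeOtherLost is set does the brute-force pass consider evicting
// lost-able allocations, ranking candidates by CalcCost().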
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}
uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert a new free suballocation after this one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert a new free suballocation before this one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from the beginning of this suballocation; apply debug margin and alignment.
        *pOffset = suballocItem->offset;
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // If the final offset is past the end of this suballocation, this item
        // cannot serve as the starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached, counting items to make lost.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // Conflicting allocations must also be made lost, otherwise this fails.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from the beginning of this suballocation; apply debug margin and alignment.
        *pOffset = suballoc.offset;
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margins is bigger than this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: success. *pOffset is already filled.
    return true;
}
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}
VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
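// Coalescing note: freeing merges with an adjacent free neighbor on either
// side, which is exactly what preserves the Validate() invariant that two
// free suballocations are never adjacent.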
9059 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
9061 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9062 VMA_ASSERT(item->size > 0);
9066 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
9068 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9070 if(m_FreeSuballocationsBySize.empty())
9072 m_FreeSuballocationsBySize.push_back(item);
9076 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
9084 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
9086 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9087 VMA_ASSERT(item->size > 0);
9091 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
9093 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9095 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
9096 m_FreeSuballocationsBySize.data(),
9097 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
9099 VmaSuballocationItemSizeLess());
9100 for(
size_t index = it - m_FreeSuballocationsBySize.data();
9101 index < m_FreeSuballocationsBySize.size();
9104 if(m_FreeSuballocationsBySize[index] == item)
9106 VmaVectorRemove(m_FreeSuballocationsBySize, index);
9109 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
9111 VMA_ASSERT(0 && "Not found.");
9117 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
9118 VkDeviceSize bufferImageGranularity,
9119 VmaSuballocationType& inOutPrevSuballocType) const
9121 if(bufferImageGranularity == 1 || IsEmpty())
9126 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
9127 bool typeConflictFound = false;
9128 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
9129 it != m_Suballocations.cend();
9132 const VmaSuballocationType suballocType = it->type;
9133 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
9135 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
9136 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
9138 typeConflictFound = true;
9140 inOutPrevSuballocType = suballocType;
9144 return typeConflictFound || minAlignment >= bufferImageGranularity;
9150 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
9151 VmaBlockMetadata(hAllocator),
9153 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9154 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9155 m_1stVectorIndex(0),
9156 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
9157 m_1stNullItemsBeginCount(0),
9158 m_1stNullItemsMiddleCount(0),
9159 m_2ndNullItemsCount(0)
9163 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
9167 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
9169 VmaBlockMetadata::Init(size);
9170 m_SumFreeSize = size;
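// The linear metadata keeps two suballocation vectors and swaps their roles via
// m_1stVectorIndex. Depending on m_2ndVectorMode the block behaves as:
//   SECOND_VECTOR_EMPTY:        |--- 1st grows right --->                          |
//   SECOND_VECTOR_RING_BUFFER:  |-- 2nd (wrapped tail) --> ...   |--- 1st --->     |
//   SECOND_VECTOR_DOUBLE_STACK: |--- 1st grows right ---> ... <--- 2nd grows left--|
// Freed entries stay in place as "null items", counted in m_1stNullItemsBeginCount /
// m_1stNullItemsMiddleCount / m_2ndNullItemsCount, and are trimmed lazily.
// A pool using this metadata is created with VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
// hedged usage sketch (assumes an existing VmaAllocator `allocator` and a valid
// `memoryTypeIndex` — both hypothetical names):
//
//   VmaPoolCreateInfo poolInfo = {};
//   poolInfo.memoryTypeIndex = memoryTypeIndex;
//   poolInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
//   poolInfo.blockSize = 64ull * 1024 * 1024; // linear algorithm uses one fixed block
//   poolInfo.maxBlockCount = 1;
//   VmaPool pool;
//   VkResult res = vmaCreatePool(allocator, &poolInfo, &pool);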
9173 bool VmaBlockMetadata_Linear::Validate() const
9175 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9176 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9178 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
9179 VMA_VALIDATE(!suballocations1st.empty() ||
9180 suballocations2nd.empty() ||
9181 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
9183 if(!suballocations1st.empty())
9186 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
9188 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
9190 if(!suballocations2nd.empty())
9193 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
9196 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
9197 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
9199 VkDeviceSize sumUsedSize = 0;
9200 const size_t suballoc1stCount = suballocations1st.size();
9201 VkDeviceSize offset = VMA_DEBUG_MARGIN;
9203 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9205 const size_t suballoc2ndCount = suballocations2nd.size();
9206 size_t nullItem2ndCount = 0;
9207 for(size_t i = 0; i < suballoc2ndCount; ++i)
9209 const VmaSuballocation& suballoc = suballocations2nd[i];
9210 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9212 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9213 VMA_VALIDATE(suballoc.offset >= offset);
9217 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9218 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9219 sumUsedSize += suballoc.size;
9226 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9229 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
9232 for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
9234 const VmaSuballocation& suballoc = suballocations1st[i];
9235 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
9236 suballoc.hAllocation == VK_NULL_HANDLE);
9239 size_t nullItem1stCount = m_1stNullItemsBeginCount;
9241 for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
9243 const VmaSuballocation& suballoc = suballocations1st[i];
9244 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9246 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9247 VMA_VALIDATE(suballoc.offset >= offset);
9248 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
9252 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9253 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9254 sumUsedSize += suballoc.size;
9261 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9263 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
9265 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9267 const size_t suballoc2ndCount = suballocations2nd.size();
9268 size_t nullItem2ndCount = 0;
9269 for(size_t i = suballoc2ndCount; i--; )
9271 const VmaSuballocation& suballoc = suballocations2nd[i];
9272 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9274 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9275 VMA_VALIDATE(suballoc.offset >= offset);
9279 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9280 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9281 sumUsedSize += suballoc.size;
9288 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9291 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
9294 VMA_VALIDATE(offset <= GetSize());
9295 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
9300 size_t VmaBlockMetadata_Linear::GetAllocationCount() const
9302 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
9303 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
9306 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
9308 const VkDeviceSize size = GetSize();
9320 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9322 switch(m_2ndVectorMode)
9324 case SECOND_VECTOR_EMPTY:
9330 const size_t suballocations1stCount = suballocations1st.size();
9331 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
9332 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9333 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
9335 firstSuballoc.offset,
9336 size - (lastSuballoc.offset + lastSuballoc.size));
9340 case SECOND_VECTOR_RING_BUFFER:
9345 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9346 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
9347 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
9348 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
9352 case SECOND_VECTOR_DOUBLE_STACK:
9357 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9358 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9359 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9360 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
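// Worked example for the three cases above (illustrative numbers): with GetSize() = 1000,
// EMPTY mode, the first suballocation at offset 100 and the last one ending at 700, the
// result is the larger of the two edge gaps: max(100, 1000 - 700) = 300. In DOUBLE_STACK
// mode with the 1st vector ending at 300 and the top of the 2nd stack at 800, the only
// free region is the gap between the stacks: 800 - 300 = 500.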
9370 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
9372 const VkDeviceSize size = GetSize();
9373 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9374 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9375 const size_t suballoc1stCount = suballocations1st.size();
9376 const size_t suballoc2ndCount = suballocations2nd.size();
9387 VkDeviceSize lastOffset = 0;
9389 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9391 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9392 size_t nextAlloc2ndIndex = 0;
9393 while(lastOffset < freeSpace2ndTo1stEnd)
9396 while(nextAlloc2ndIndex < suballoc2ndCount &&
9397 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9399 ++nextAlloc2ndIndex;
9403 if(nextAlloc2ndIndex < suballoc2ndCount)
9405 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9408 if(lastOffset < suballoc.offset)
9411 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9425 lastOffset = suballoc.offset + suballoc.size;
9426 ++nextAlloc2ndIndex;
9432 if(lastOffset < freeSpace2ndTo1stEnd)
9434 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9442 lastOffset = freeSpace2ndTo1stEnd;
9447 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9448 const VkDeviceSize freeSpace1stTo2ndEnd =
9449 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9450 while(lastOffset < freeSpace1stTo2ndEnd)
9453 while(nextAlloc1stIndex < suballoc1stCount &&
9454 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9456 ++nextAlloc1stIndex;
9460 if(nextAlloc1stIndex < suballoc1stCount)
9462 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9465 if(lastOffset < suballoc.offset)
9468 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9482 lastOffset = suballoc.offset + suballoc.size;
9483 ++nextAlloc1stIndex;
9489 if(lastOffset < freeSpace1stTo2ndEnd)
9491 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9499 lastOffset = freeSpace1stTo2ndEnd;
9503 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9505 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9506 while(lastOffset < size)
9509 while(nextAlloc2ndIndex != SIZE_MAX &&
9510 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9512 --nextAlloc2ndIndex;
9516 if(nextAlloc2ndIndex != SIZE_MAX)
9518 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9521 if(lastOffset < suballoc.offset)
9524 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9538 lastOffset = suballoc.offset + suballoc.size;
9539 --nextAlloc2ndIndex;
9545 if(lastOffset < size)
9547 const VkDeviceSize unusedRangeSize = size - lastOffset;
9563 void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
9565 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9566 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9567 const VkDeviceSize size = GetSize();
9568 const size_t suballoc1stCount = suballocations1st.size();
9569 const size_t suballoc2ndCount = suballocations2nd.size();
9571 inoutStats.size += size;
9573 VkDeviceSize lastOffset = 0;
9575 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9577 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9578 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9579 while(lastOffset < freeSpace2ndTo1stEnd)
9582 while(nextAlloc2ndIndex < suballoc2ndCount &&
9583 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9585 ++nextAlloc2ndIndex;
9589 if(nextAlloc2ndIndex < suballoc2ndCount)
9591 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9594 if(lastOffset < suballoc.offset)
9597 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9608 lastOffset = suballoc.offset + suballoc.size;
9609 ++nextAlloc2ndIndex;
9614 if(lastOffset < freeSpace2ndTo1stEnd)
9617 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9624 lastOffset = freeSpace2ndTo1stEnd;
9629 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9630 const VkDeviceSize freeSpace1stTo2ndEnd =
9631 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9632 while(lastOffset < freeSpace1stTo2ndEnd)
9635 while(nextAlloc1stIndex < suballoc1stCount &&
9636 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9638 ++nextAlloc1stIndex;
9642 if(nextAlloc1stIndex < suballoc1stCount)
9644 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9647 if(lastOffset < suballoc.offset)
9650 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9661 lastOffset = suballoc.offset + suballoc.size;
9662 ++nextAlloc1stIndex;
9667 if(lastOffset < freeSpace1stTo2ndEnd)
9670 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9677 lastOffset = freeSpace1stTo2ndEnd;
9681 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9683 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9684 while(lastOffset < size)
9687 while(nextAlloc2ndIndex != SIZE_MAX &&
9688 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9690 --nextAlloc2ndIndex;
9694 if(nextAlloc2ndIndex != SIZE_MAX)
9696 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9699 if(lastOffset < suballoc.offset)
9702 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9713 lastOffset = suballoc.offset + suballoc.size;
9714 --nextAlloc2ndIndex;
9719 if(lastOffset < size)
9722 const VkDeviceSize unusedRangeSize = size - lastOffset;
9735 #if VMA_STATS_STRING_ENABLED
9736 void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
9738 const VkDeviceSize size = GetSize();
9739 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9740 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9741 const size_t suballoc1stCount = suballocations1st.size();
9742 const size_t suballoc2ndCount = suballocations2nd.size();
9746 size_t unusedRangeCount = 0;
9747 VkDeviceSize usedBytes = 0;
9749 VkDeviceSize lastOffset = 0;
9751 size_t alloc2ndCount = 0;
9752 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9754 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9755 size_t nextAlloc2ndIndex = 0;
9756 while(lastOffset < freeSpace2ndTo1stEnd)
9759 while(nextAlloc2ndIndex < suballoc2ndCount &&
9760 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9762 ++nextAlloc2ndIndex;
9766 if(nextAlloc2ndIndex < suballoc2ndCount)
9768 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9771 if(lastOffset < suballoc.offset)
9780 usedBytes += suballoc.size;
9783 lastOffset = suballoc.offset + suballoc.size;
9784 ++nextAlloc2ndIndex;
9789 if(lastOffset < freeSpace2ndTo1stEnd)
9796 lastOffset = freeSpace2ndTo1stEnd;
9801 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9802 size_t alloc1stCount = 0;
9803 const VkDeviceSize freeSpace1stTo2ndEnd =
9804 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9805 while(lastOffset < freeSpace1stTo2ndEnd)
9808 while(nextAlloc1stIndex < suballoc1stCount &&
9809 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9811 ++nextAlloc1stIndex;
9815 if(nextAlloc1stIndex < suballoc1stCount)
9817 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9820 if(lastOffset < suballoc.offset)
9829 usedBytes += suballoc.size;
9832 lastOffset = suballoc.offset + suballoc.size;
9833 ++nextAlloc1stIndex;
9838 if(lastOffset < size)
9845 lastOffset = freeSpace1stTo2ndEnd;
9849 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9851 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9852 while(lastOffset < size)
9855 while(nextAlloc2ndIndex != SIZE_MAX &&
9856 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9858 --nextAlloc2ndIndex;
9862 if(nextAlloc2ndIndex != SIZE_MAX)
9864 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9867 if(lastOffset < suballoc.offset)
9876 usedBytes += suballoc.size;
9879 lastOffset = suballoc.offset + suballoc.size;
9880 --nextAlloc2ndIndex;
9885 if(lastOffset < size)
9897 const VkDeviceSize unusedBytes = size - usedBytes;
9898 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
9903 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9905 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9906 size_t nextAlloc2ndIndex = 0;
9907 while(lastOffset < freeSpace2ndTo1stEnd)
9910 while(nextAlloc2ndIndex < suballoc2ndCount &&
9911 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9913 ++nextAlloc2ndIndex;
9917 if(nextAlloc2ndIndex < suballoc2ndCount)
9919 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9922 if(lastOffset < suballoc.offset)
9925 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9926 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9931 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9934 lastOffset = suballoc.offset + suballoc.size;
9935 ++nextAlloc2ndIndex;
9940 if(lastOffset < freeSpace2ndTo1stEnd)
9943 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9944 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9948 lastOffset = freeSpace2ndTo1stEnd;
9953 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9954 while(lastOffset < freeSpace1stTo2ndEnd)
9957 while(nextAlloc1stIndex < suballoc1stCount &&
9958 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9960 ++nextAlloc1stIndex;
9964 if(nextAlloc1stIndex < suballoc1stCount)
9966 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9969 if(lastOffset < suballoc.offset)
9972 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9973 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9978 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9981 lastOffset = suballoc.offset + suballoc.size;
9982 ++nextAlloc1stIndex;
9987 if(lastOffset < freeSpace1stTo2ndEnd)
9990 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9991 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9995 lastOffset = freeSpace1stTo2ndEnd;
9999 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10001 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10002 while(lastOffset < size)
10005 while(nextAlloc2ndIndex != SIZE_MAX &&
10006 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10008 --nextAlloc2ndIndex;
10012 if(nextAlloc2ndIndex != SIZE_MAX)
10014 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10017 if(lastOffset < suballoc.offset)
10020 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10021 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10026 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10029 lastOffset = suballoc.offset + suballoc.size;
10030 --nextAlloc2ndIndex;
10035 if(lastOffset < size)
10038 const VkDeviceSize unusedRangeSize = size - lastOffset;
10039 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10048 PrintDetailedMap_End(json);
10050 #endif // #if VMA_STATS_STRING_ENABLED
10052 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
10053 uint32_t currentFrameIndex,
10054 uint32_t frameInUseCount,
10055 VkDeviceSize bufferImageGranularity,
10056 VkDeviceSize allocSize,
10057 VkDeviceSize allocAlignment,
10059 VmaSuballocationType allocType,
10060 bool canMakeOtherLost,
10062 VmaAllocationRequest* pAllocationRequest)
10064 VMA_ASSERT(allocSize > 0);
10065 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
10066 VMA_ASSERT(pAllocationRequest != VMA_NULL);
10067 VMA_HEAVY_ASSERT(Validate());
10068 return upperAddress ?
10069 CreateAllocationRequest_UpperAddress(
10070 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10071 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
10072 CreateAllocationRequest_LowerAddress(
10073 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10074 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
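// upperAddress maps to VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT, which turns a linear pool
// into a double stack: such allocations grow downward from the end of the block. Hedged
// usage sketch (assumes `pool` was created with VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT and
// `memReq` was filled by vkGetBufferMemoryRequirements — all names hypothetical):
//
//   VmaAllocationCreateInfo allocCreateInfo = {};
//   allocCreateInfo.pool = pool;
//   allocCreateInfo.flags = VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT; // allocate from the top
//   VmaAllocation alloc;
//   VmaAllocationInfo allocInfo;
//   VkResult res = vmaAllocateMemory(allocator, &memReq, &allocCreateInfo, &alloc, &allocInfo);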
10077 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
10078 uint32_t currentFrameIndex,
10079 uint32_t frameInUseCount,
10080 VkDeviceSize bufferImageGranularity,
10081 VkDeviceSize allocSize,
10082 VkDeviceSize allocAlignment,
10083 VmaSuballocationType allocType,
10084 bool canMakeOtherLost,
10086 VmaAllocationRequest* pAllocationRequest)
10088 const VkDeviceSize size = GetSize();
10089 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10090 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10092 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10094 VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
10099 if(allocSize > size)
10103 VkDeviceSize resultBaseOffset = size - allocSize;
10104 if(!suballocations2nd.empty())
10106 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10107 resultBaseOffset = lastSuballoc.offset - allocSize;
10108 if(allocSize > lastSuballoc.offset)
10115 VkDeviceSize resultOffset = resultBaseOffset;
10118 if(VMA_DEBUG_MARGIN > 0)
10120 if(resultOffset < VMA_DEBUG_MARGIN)
10124 resultOffset -= VMA_DEBUG_MARGIN;
10128 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
10132 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10134 bool bufferImageGranularityConflict = false;
10135 for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
10137 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
10138 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10140 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
10142 bufferImageGranularityConflict = true;
10150 if(bufferImageGranularityConflict)
10152 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
10157 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
10158 suballocations1st.back().offset + suballocations1st.back().size :
10160 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
10164 if(bufferImageGranularity > 1)
10166 for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
10168 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
10169 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10171 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
10185 pAllocationRequest->offset = resultOffset;
10186 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
10187 pAllocationRequest->sumItemSize = 0;
10189 pAllocationRequest->itemsToMakeLostCount = 0;
10190 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
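// Top-down placement aligns downward rather than upward. Illustrative numbers: with
// GetSize() = 1024, allocSize = 100, allocAlignment = 64, an empty 2nd stack and
// VMA_DEBUG_MARGIN = 0, resultBaseOffset = 1024 - 100 = 924 and
// VmaAlignDown(924, 64) = 896, so the allocation occupies [896, 996) and the
// remainder up to 1024 stays as padding toward the end of the block.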
10197 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
10198 uint32_t currentFrameIndex,
10199 uint32_t frameInUseCount,
10200 VkDeviceSize bufferImageGranularity,
10201 VkDeviceSize allocSize,
10202 VkDeviceSize allocAlignment,
10203 VmaSuballocationType allocType,
10204 bool canMakeOtherLost,
10206 VmaAllocationRequest* pAllocationRequest)
10208 const VkDeviceSize size = GetSize();
10209 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10210 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10212 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10216 VkDeviceSize resultBaseOffset = 0;
10217 if(!suballocations1st.empty())
10219 const VmaSuballocation& lastSuballoc = suballocations1st.back();
10220 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10224 VkDeviceSize resultOffset = resultBaseOffset;
10227 if(VMA_DEBUG_MARGIN > 0)
10229 resultOffset += VMA_DEBUG_MARGIN;
10233 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
10237 if(bufferImageGranularity > 1 && !suballocations1st.empty())
10239 bool bufferImageGranularityConflict = false;
10240 for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
10242 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
10243 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10245 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10247 bufferImageGranularityConflict = true;
10255 if(bufferImageGranularityConflict)
10257 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10261 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
10262 suballocations2nd.back().offset : size;
10265 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
10269 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10271 for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
10273 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
10274 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10276 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10290 pAllocationRequest->offset = resultOffset;
10291 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
10292 pAllocationRequest->sumItemSize = 0;
10294 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
10295 pAllocationRequest->itemsToMakeLostCount = 0;
10302 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10304 VMA_ASSERT(!suballocations1st.empty());
10306 VkDeviceSize resultBaseOffset = 0;
10307 if(!suballocations2nd.empty())
10309 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10310 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10314 VkDeviceSize resultOffset = resultBaseOffset;
10317 if(VMA_DEBUG_MARGIN > 0)
10319 resultOffset += VMA_DEBUG_MARGIN;
10323 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
10327 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10329 bool bufferImageGranularityConflict = false;
10330 for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
10332 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
10333 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10335 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10337 bufferImageGranularityConflict = true;
10345 if(bufferImageGranularityConflict)
10347 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10351 pAllocationRequest->itemsToMakeLostCount = 0;
10352 pAllocationRequest->sumItemSize = 0;
10353 size_t index1st = m_1stNullItemsBeginCount;
10355 if(canMakeOtherLost)
10357 while(index1st < suballocations1st.size() &&
10358 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10361 const VmaSuballocation& suballoc = suballocations1st[index1st];
10362 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10368 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10369 if(suballoc.hAllocation->CanBecomeLost() &&
10370 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10372 ++pAllocationRequest->itemsToMakeLostCount;
10373 pAllocationRequest->sumItemSize += suballoc.size;
10385 if(bufferImageGranularity > 1)
10387 while(index1st < suballocations1st.size())
10389 const VmaSuballocation& suballoc = suballocations1st[index1st];
10390 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10392 if(suballoc.hAllocation != VK_NULL_HANDLE)
10395 if(suballoc.hAllocation->CanBecomeLost() &&
10396 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10398 ++pAllocationRequest->itemsToMakeLostCount;
10399 pAllocationRequest->sumItemSize += suballoc.size;
10417 if(index1st == suballocations1st.size() &&
10418 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10421 VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
10426 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10427 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
10431 if(bufferImageGranularity > 1)
10433 for(size_t nextSuballocIndex = index1st;
10434 nextSuballocIndex < suballocations1st.size();
10435 nextSuballocIndex++)
10437 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10438 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10440 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10454 pAllocationRequest->offset = resultOffset;
10455 pAllocationRequest->sumFreeSize =
10456 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10458 - pAllocationRequest->sumItemSize;
10459 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
10468 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10469 uint32_t currentFrameIndex,
10470 uint32_t frameInUseCount,
10471 VmaAllocationRequest* pAllocationRequest)
10473 if(pAllocationRequest->itemsToMakeLostCount == 0)
10478 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10481 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10482 size_t index = m_1stNullItemsBeginCount;
10483 size_t madeLostCount = 0;
10484 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
10486 if(index == suballocations->size())
10490 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10492 suballocations = &AccessSuballocations2nd();
10496 VMA_ASSERT(!suballocations->empty());
10498 VmaSuballocation& suballoc = (*suballocations)[index];
10499 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10501 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10502 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10503 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10505 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10506 suballoc.hAllocation = VK_NULL_HANDLE;
10507 m_SumFreeSize += suballoc.size;
10508 if(suballocations == &AccessSuballocations1st())
10510 ++m_1stNullItemsMiddleCount;
10514 ++m_2ndNullItemsCount;
10526 CleanupAfterFree();
10532 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10534 uint32_t lostAllocationCount = 0;
10536 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10537 for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10539 VmaSuballocation& suballoc = suballocations1st[i];
10540 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10541 suballoc.hAllocation->CanBecomeLost() &&
10542 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10544 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10545 suballoc.hAllocation = VK_NULL_HANDLE;
10546 ++m_1stNullItemsMiddleCount;
10547 m_SumFreeSize += suballoc.size;
10548 ++lostAllocationCount;
10552 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10553 for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10555 VmaSuballocation& suballoc = suballocations2nd[i];
10556 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10557 suballoc.hAllocation->CanBecomeLost() &&
10558 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10560 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10561 suballoc.hAllocation = VK_NULL_HANDLE;
10562 ++m_2ndNullItemsCount;
10563 m_SumFreeSize += suballoc.size;
10564 ++lostAllocationCount;
10568 if(lostAllocationCount)
10570 CleanupAfterFree();
10573 return lostAllocationCount;
10576 VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
10578 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10579 for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10581 const VmaSuballocation& suballoc = suballocations1st[i];
10582 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10584 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10586 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10587 return VK_ERROR_VALIDATION_FAILED_EXT;
10589 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10591 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10592 return VK_ERROR_VALIDATION_FAILED_EXT;
10597 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10598 for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10600 const VmaSuballocation& suballoc = suballocations2nd[i];
10601 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10603 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10605 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10606 return VK_ERROR_VALIDATION_FAILED_EXT;
10608 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10610 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10611 return VK_ERROR_VALIDATION_FAILED_EXT;
10619 void VmaBlockMetadata_Linear::Alloc(
10620 const VmaAllocationRequest& request,
10621 VmaSuballocationType type,
10622 VkDeviceSize allocSize,
10625 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10627 switch(request.type)
10629 case VmaAllocationRequestType::UpperAddress:
10631 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10632 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10633 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10634 suballocations2nd.push_back(newSuballoc);
10635 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
10638 case VmaAllocationRequestType::EndOf1st:
10640 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10642 VMA_ASSERT(suballocations1st.empty() ||
10643 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10645 VMA_ASSERT(request.offset + allocSize <= GetSize());
10647 suballocations1st.push_back(newSuballoc);
10650 case VmaAllocationRequestType::EndOf2nd:
10652 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10654 VMA_ASSERT(!suballocations1st.empty() &&
10655 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10656 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10658 switch(m_2ndVectorMode)
10660 case SECOND_VECTOR_EMPTY:
10662 VMA_ASSERT(suballocations2nd.empty());
10663 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10665 case SECOND_VECTOR_RING_BUFFER:
10667 VMA_ASSERT(!suballocations2nd.empty());
10669 case SECOND_VECTOR_DOUBLE_STACK:
10670 VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10676 suballocations2nd.push_back(newSuballoc);
10680 VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
10683 m_SumFreeSize -= newSuballoc.size;
10686 void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
10688 FreeAtOffset(allocation->GetOffset());
10691 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10693 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10694 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10696 if(!suballocations1st.empty())
10699 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10700 if(firstSuballoc.offset == offset)
10702 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10703 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10704 m_SumFreeSize += firstSuballoc.size;
10705 ++m_1stNullItemsBeginCount;
10706 CleanupAfterFree();
10712 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10713 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10715 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10716 if(lastSuballoc.offset == offset)
10718 m_SumFreeSize += lastSuballoc.size;
10719 suballocations2nd.pop_back();
10720 CleanupAfterFree();
10725 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10727 VmaSuballocation& lastSuballoc = suballocations1st.back();
10728 if(lastSuballoc.offset == offset)
10730 m_SumFreeSize += lastSuballoc.size;
10731 suballocations1st.pop_back();
10732 CleanupAfterFree();
10739 VmaSuballocation refSuballoc;
10740 refSuballoc.offset = offset;
10742 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
10743 suballocations1st.begin() + m_1stNullItemsBeginCount,
10744 suballocations1st.end(),
10746 VmaSuballocationOffsetLess());
10747 if(it != suballocations1st.end())
10749 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10750 it->hAllocation = VK_NULL_HANDLE;
10751 ++m_1stNullItemsMiddleCount;
10752 m_SumFreeSize += it->size;
10753 CleanupAfterFree();
10758 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
10761 VmaSuballocation refSuballoc;
10762 refSuballoc.offset = offset;
10764 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10765 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
10766 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
10767 if(it != suballocations2nd.end())
10769 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10770 it->hAllocation = VK_NULL_HANDLE;
10771 ++m_2ndNullItemsCount;
10772 m_SumFreeSize += it->size;
10773 CleanupAfterFree();
10778 VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
10781 bool VmaBlockMetadata_Linear::ShouldCompact1st() const
10783 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10784 const size_t suballocCount = AccessSuballocations1st().size();
10785 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
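// The compaction heuristic reads: only bother once the 1st vector has more than 32
// entries and null items make up at least 60% of the total, since
// 2n >= 3(s - n)  <=>  5n >= 3s  <=>  n >= 0.6 * s.
// E.g. with suballocCount = 100, compaction starts at nullItemCount = 60 (120 >= 120).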
10788 void VmaBlockMetadata_Linear::CleanupAfterFree()
10790 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10791 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10795 suballocations1st.clear();
10796 suballocations2nd.clear();
10797 m_1stNullItemsBeginCount = 0;
10798 m_1stNullItemsMiddleCount = 0;
10799 m_2ndNullItemsCount = 0;
10800 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10804 const size_t suballoc1stCount = suballocations1st.size();
10805 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10806 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
10809 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10810 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10812 ++m_1stNullItemsBeginCount;
10813 --m_1stNullItemsMiddleCount;
10817 while(m_1stNullItemsMiddleCount > 0 &&
10818 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10820 --m_1stNullItemsMiddleCount;
10821 suballocations1st.pop_back();
10825 while(m_2ndNullItemsCount > 0 &&
10826 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10828 --m_2ndNullItemsCount;
10829 suballocations2nd.pop_back();
10833 while(m_2ndNullItemsCount > 0 &&
10834 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10836 --m_2ndNullItemsCount;
10837 VmaVectorRemove(suballocations2nd, 0);
10840 if(ShouldCompact1st())
10842 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10843 size_t srcIndex = m_1stNullItemsBeginCount;
10844 for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10846 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10850 if(dstIndex != srcIndex)
10852 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10856 suballocations1st.resize(nonNullItemCount);
10857 m_1stNullItemsBeginCount = 0;
10858 m_1stNullItemsMiddleCount = 0;
10862 if(suballocations2nd.empty())
10864 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10868 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10870 suballocations1st.clear();
10871 m_1stNullItemsBeginCount = 0;
10873 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10876 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10877 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10878 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10879 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10881 ++m_1stNullItemsBeginCount;
10882 --m_1stNullItemsMiddleCount;
10884 m_2ndNullItemsCount = 0;
10885 m_1stVectorIndex ^= 1;
10890 VMA_HEAVY_ASSERT(Validate());
10897 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
10898 VmaBlockMetadata(hAllocator),
10900 m_AllocationCount(0),
10904 memset(m_FreeList, 0, sizeof(m_FreeList));
10907 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10909 DeleteNode(m_Root);
10912 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10914 VmaBlockMetadata::Init(size);
10916 m_UsableSize = VmaPrevPow2(size);
10917 m_SumFreeSize = m_UsableSize;
10921 while(m_LevelCount < MAX_LEVELS &&
10922 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
10927 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10928 rootNode->offset = 0;
10929 rootNode->type = Node::TYPE_FREE;
10930 rootNode->parent = VMA_NULL;
10931 rootNode->buddy = VMA_NULL;
10934 AddToFreeListFront(0, rootNode);
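// Worked example for Init() (illustrative size): Init(100 MiB) sets
// m_UsableSize = VmaPrevPow2(100 MiB) = 64 MiB; the remaining 36 MiB is reported as
// "unusable" by GetUnusableSize(). Node sizes then halve per level
// (LevelToNodeSize(level) = m_UsableSize >> level): 64, 32, 16, ... MiB, and the loop
// stops before a level would drop below MIN_NODE_SIZE, so every request is served by
// a power-of-two node.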
10937 bool VmaBlockMetadata_Buddy::Validate() const
10940 ValidationContext ctx;
10941 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10943 VMA_VALIDATE(false && "ValidateNode failed.");
10945 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10946 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
10949 for(uint32_t level = 0; level < m_LevelCount; ++level)
10951 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10952 m_FreeList[level].front->free.prev == VMA_NULL);
10954 for(Node* node = m_FreeList[level].front;
10956 node = node->free.next)
10958 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10960 if(node->free.next == VMA_NULL)
10962 VMA_VALIDATE(m_FreeList[level].back == node);
10966 VMA_VALIDATE(node->free.next->free.prev == node);
10972 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10974 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
10980 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
10982 for(uint32_t level = 0; level < m_LevelCount; ++level)
10984 if(m_FreeList[level].front != VMA_NULL)
10986 return LevelToNodeSize(level);
10992 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
10994 const VkDeviceSize unusableSize = GetUnusableSize();
11005 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
11007 if(unusableSize > 0)
11016 void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
11018 const VkDeviceSize unusableSize = GetUnusableSize();
11020 inoutStats.size += GetSize();
11021 inoutStats.unusedSize += m_SumFreeSize + unusableSize;
11026 if(unusableSize > 0)
11033 #if VMA_STATS_STRING_ENABLED
11035 void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
11039 CalcAllocationStatInfo(stat);
11041 PrintDetailedMap_Begin(
11047 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
11049 const VkDeviceSize unusableSize = GetUnusableSize();
11050 if(unusableSize > 0)
11052 PrintDetailedMap_UnusedRange(json,
11057 PrintDetailedMap_End(json);
11060 #endif // #if VMA_STATS_STRING_ENABLED
11062 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
11063 uint32_t currentFrameIndex,
11064 uint32_t frameInUseCount,
11065 VkDeviceSize bufferImageGranularity,
11066 VkDeviceSize allocSize,
11067 VkDeviceSize allocAlignment,
11069 VmaSuballocationType allocType,
11070 bool canMakeOtherLost,
11072 VmaAllocationRequest* pAllocationRequest)
11074 VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
11078 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
11079 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
11080 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
11082 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
11083 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
11086 if(allocSize > m_UsableSize)
11091 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
11092 for(uint32_t level = targetLevel + 1; level--; )
11094 for(Node* freeNode = m_FreeList[level].front;
11095 freeNode != VMA_NULL;
11096 freeNode = freeNode->free.next)
11098 if(freeNode->offset % allocAlignment == 0)
11100 pAllocationRequest->type = VmaAllocationRequestType::Normal;
11101 pAllocationRequest->offset = freeNode->offset;
11102 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
11103 pAllocationRequest->sumItemSize = 0;
11104 pAllocationRequest->itemsToMakeLostCount = 0;
11105 pAllocationRequest->customData = (void*)(uintptr_t)level;
11114 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
11115 uint32_t currentFrameIndex,
11116 uint32_t frameInUseCount,
11117 VmaAllocationRequest* pAllocationRequest)
11123 return pAllocationRequest->itemsToMakeLostCount == 0;
11126 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
11135 void VmaBlockMetadata_Buddy::Alloc(
11136 const VmaAllocationRequest& request,
11137 VmaSuballocationType type,
11138 VkDeviceSize allocSize,
11141 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
11143 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
11144 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
11146 Node* currNode = m_FreeList[currLevel].front;
11147 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
11148 while(currNode->offset != request.offset)
11150 currNode = currNode->free.next;
11151 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
11155 while(currLevel < targetLevel)
11159 RemoveFromFreeList(currLevel, currNode);
11161 const uint32_t childrenLevel = currLevel + 1;
11164 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
11165 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
11167 leftChild->offset = currNode->offset;
11168 leftChild->type = Node::TYPE_FREE;
11169 leftChild->parent = currNode;
11170 leftChild->buddy = rightChild;
11172 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
11173 rightChild->type = Node::TYPE_FREE;
11174 rightChild->parent = currNode;
11175 rightChild->buddy = leftChild;
11178 currNode->type = Node::TYPE_SPLIT;
11179 currNode->split.leftChild = leftChild;
11182 AddToFreeListFront(childrenLevel, rightChild);
11183 AddToFreeListFront(childrenLevel, leftChild);
11188 currNode = m_FreeList[currLevel].front;
11197 VMA_ASSERT(currLevel == targetLevel &&
11198 currNode != VMA_NULL &&
11199 currNode->type == Node::TYPE_FREE);
11200 RemoveFromFreeList(currLevel, currNode);
11203 currNode->type = Node::TYPE_ALLOCATION;
11204 currNode->allocation.alloc = hAllocation;
11206 ++m_AllocationCount;
11208 m_SumFreeSize -= allocSize;
11211 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
11213 if(node->type == Node::TYPE_SPLIT)
11215 DeleteNode(node->split.leftChild->buddy);
11216 DeleteNode(node->split.leftChild);
11219 vma_delete(GetAllocationCallbacks(), node);
11222 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
11224 VMA_VALIDATE(level < m_LevelCount);
11225 VMA_VALIDATE(curr->parent == parent);
11226 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
11227 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
11230 case Node::TYPE_FREE:
11232 ctx.calculatedSumFreeSize += levelNodeSize;
11233 ++ctx.calculatedFreeCount;
11235 case Node::TYPE_ALLOCATION:
11236 ++ctx.calculatedAllocationCount;
11237 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
11238 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
11240 case Node::TYPE_SPLIT:
11242 const uint32_t childrenLevel = level + 1;
11243 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
11244 const Node* const leftChild = curr->split.leftChild;
11245 VMA_VALIDATE(leftChild != VMA_NULL);
11246 VMA_VALIDATE(leftChild->offset == curr->offset);
11247 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
11249 VMA_VALIDATE(false && "ValidateNode for left child failed.");
11251 const Node* const rightChild = leftChild->buddy;
11252 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
11253 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
11255 VMA_VALIDATE(false && "ValidateNode for right child failed.");
11266 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
11269 uint32_t level = 0;
11270 VkDeviceSize currLevelNodeSize = m_UsableSize;
11271 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
11272 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
11275 currLevelNodeSize = nextLevelNodeSize;
11276 nextLevelNodeSize = currLevelNodeSize >> 1;
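// Example for AllocSizeToLevel() (illustrative numbers): m_UsableSize = 64 MiB,
// allocSize = 3 MiB. Candidate node sizes halve: 64, 32, 16, 8, 4, 2 MiB. The loop
// descends while allocSize still fits the next smaller level and stops at the 4 MiB
// node (level 4), the smallest node that holds 3 MiB; the 1 MiB remainder is the
// internal fragmentation inherent to the buddy scheme.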
11281 void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
11284 Node* node = m_Root;
11285 VkDeviceSize nodeOffset = 0;
11286 uint32_t level = 0;
11287 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
11288 while(node->type == Node::TYPE_SPLIT)
11290 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
11291 if(offset < nodeOffset + nextLevelSize)
11293 node = node->split.leftChild;
11297 node = node->split.leftChild->buddy;
11298 nodeOffset += nextLevelSize;
11301 levelNodeSize = nextLevelSize;
11304 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
11305 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
11308 --m_AllocationCount;
11309 m_SumFreeSize += alloc->GetSize();
11311 node->type = Node::TYPE_FREE;
11314 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
11316 RemoveFromFreeList(level, node->buddy);
11317 Node* const parent = node->parent;
11319 vma_delete(GetAllocationCallbacks(), node->buddy);
11320 vma_delete(GetAllocationCallbacks(), node);
11321 parent->type = Node::TYPE_FREE;
11329 AddToFreeListFront(level, node);
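// Freeing descends from the root by offset, then merges upward: while the buddy is
// also free, both children are deleted and the parent becomes one free node a level
// up. The descent step that picks a child is the core trick:
//
//   // sketch of the choice made in the loop above
//   const bool goLeft = offset < nodeOffset + (levelNodeSize >> 1);
//
// because the left child always covers [nodeOffset, nodeOffset + levelNodeSize / 2).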
11332 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
11336 case Node::TYPE_FREE:
11342 case Node::TYPE_ALLOCATION:
11344 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11350 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11351 if(unusedRangeSize > 0)
11360 case Node::TYPE_SPLIT:
11362 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11363 const Node* const leftChild = node->split.leftChild;
11364 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11365 const Node* const rightChild = leftChild->buddy;
11366 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
11374 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11376 VMA_ASSERT(node->type == Node::TYPE_FREE);
11379 Node* const frontNode = m_FreeList[level].front;
11380 if(frontNode == VMA_NULL)
11382 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11383 node->free.prev = node->free.next = VMA_NULL;
11384 m_FreeList[level].front = m_FreeList[level].back = node;
11388 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11389 node->free.prev = VMA_NULL;
11390 node->free.next = frontNode;
11391 frontNode->free.prev = node;
11392 m_FreeList[level].front = node;
11396 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11398 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
11401 if(node->free.prev == VMA_NULL)
11403 VMA_ASSERT(m_FreeList[level].front == node);
11404 m_FreeList[level].front = node->free.next;
11408 Node* const prevFreeNode = node->free.prev;
11409 VMA_ASSERT(prevFreeNode->free.next == node);
11410 prevFreeNode->free.next = node->free.next;
11414 if(node->free.next == VMA_NULL)
11416 VMA_ASSERT(m_FreeList[level].back == node);
11417 m_FreeList[level].back = node->free.prev;
11421 Node* const nextFreeNode = node->free.next;
11422 VMA_ASSERT(nextFreeNode->free.prev == node);
11423 nextFreeNode->free.prev = node->free.prev;
11427 #if VMA_STATS_STRING_ENABLED
11428 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
11432 case Node::TYPE_FREE:
11433 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11435 case Node::TYPE_ALLOCATION:
11437 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11438 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11439 if(allocSize < levelNodeSize)
11441 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11445 case Node::TYPE_SPLIT:
11447 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11448 const Node* const leftChild = node->split.leftChild;
11449 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11450 const Node* const rightChild = leftChild->buddy;
11451 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
11458 #endif // #if VMA_STATS_STRING_ENABLED
11464 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
11465 m_pMetadata(VMA_NULL),
11466 m_MemoryTypeIndex(UINT32_MAX),
11468 m_hMemory(VK_NULL_HANDLE),
11470 m_pMappedData(VMA_NULL)
11474 void VmaDeviceMemoryBlock::Init(
11477 uint32_t newMemoryTypeIndex,
11478 VkDeviceMemory newMemory,
11479 VkDeviceSize newSize,
11481 uint32_t algorithm)
11483 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11485 m_hParentPool = hParentPool;
11486 m_MemoryTypeIndex = newMemoryTypeIndex;
11488 m_hMemory = newMemory;
11493 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11496 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11502 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11504 m_pMetadata->Init(newSize);
11507 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
11511 VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
11513 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11514 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11515 m_hMemory = VK_NULL_HANDLE;
11517 vma_delete(allocator, m_pMetadata);
11518 m_pMetadata = VMA_NULL;
11521 bool VmaDeviceMemoryBlock::Validate() const
11523 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11524 (m_pMetadata->GetSize() != 0));
11526 return m_pMetadata->Validate();
11529 VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
11531 void* pData = nullptr;
11532 VkResult res = Map(hAllocator, 1, &pData);
11533 if(res != VK_SUCCESS)
11538 res = m_pMetadata->CheckCorruption(pData);
11540 Unmap(hAllocator, 1);
11545 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
11552 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11553 if(m_MapCount != 0)
11555 m_MapCount += count;
11556 VMA_ASSERT(m_pMappedData != VMA_NULL);
11557 if(ppData != VMA_NULL)
11559 *ppData = m_pMappedData;
11565 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11566 hAllocator->m_hDevice,
11572 if(result == VK_SUCCESS)
11574 if(ppData != VMA_NULL)
11576 *ppData = m_pMappedData;
11578 m_MapCount = count;
11584 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
11591 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11592 if(m_MapCount >= count)
11594 m_MapCount -= count;
11595 if(m_MapCount == 0)
11597 m_pMappedData = VMA_NULL;
11598 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11603 VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
11607 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11609 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11610 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11613 VkResult res = Map(hAllocator, 1, &pData);
11614 if(res != VK_SUCCESS)
11619 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11620 VmaWriteMagicValue(pData, allocOffset + allocSize);
11622 Unmap(hAllocator, 1);
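// Magic values are written only when both VMA_DEBUG_MARGIN > 0 and
// VMA_DEBUG_DETECT_CORRUPTION are enabled at compile time. A build wanting this
// checking could do, before including the implementation (a sketch, assuming the
// header file is named vk_mem_alloc.h):
//
//   #define VMA_DEBUG_MARGIN 16
//   #define VMA_DEBUG_DETECT_CORRUPTION 1
//   #define VMA_IMPLEMENTATION
//   #include "vk_mem_alloc.h"
//
// after which vmaCheckCorruption() / vmaCheckPoolCorruption() validate the margins.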
11627 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11629 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11630 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11633 VkResult res = Map(hAllocator, 1, &pData);
11634 if(res != VK_SUCCESS)
11639 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11641 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11643 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11645 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11648 Unmap(hAllocator, 1);
11653 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11656 VkDeviceSize allocationLocalOffset,
11660 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11661 hAllocation->GetBlock() == this);
11662 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
11663 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
11664 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
11666 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11667 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // Same locking rationale as in BindBufferMemory above.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
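// VmaInitStatInfo / VmaPostprocessCalcStatInfo work as a pair: the first
// zero-initializes a VmaStatInfo and accumulation starts from there, the second
// derives the average values from the accumulated sums and counts once all
// suballocations have been visited.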
static void VmaInitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}

VmaPool_T::~VmaPool_T()
{
}
void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}
#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}
VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}
bool VmaBlockVector::IsEmpty()
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    return m_Blocks.empty();
}
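// Corruption detection needs to write and read the debug margins with the CPU,
// so it is only possible for memory types that are both HOST_VISIBLE and
// HOST_COHERENT.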
bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}

static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
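// VMA_ALLOCATION_TRY_COUNT bounds the "make other allocations lost" retry loop
// in AllocatePage(): each try recomputes the cheapest set of allocations to
// evict, and after that many failed attempts the allocation gives up with
// VK_ERROR_TOO_MANY_OBJECTS.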
VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
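// AllocatePage() implements the block-vector allocation strategy:
// 1. Try the existing blocks: first the last (most recently used) block, then
//    the blocks in forward order, then in backward order.
// 2. If that fails and a new block may be created, create one, halving
//    m_PreferredBlockSize up to 3 times (1/2, 1/4, 1/8) while it still fits.
// 3. As a last resort, when the caller allows it, make other (lost-able)
//    allocations lost to free up space, retrying up to VMA_ALLOCATION_TRY_COUNT
//    times.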
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);

    // If the linear algorithm is used, canMakeOtherLost is available only when
    // used as a ring buffer, which in turn requires maxBlockCount == 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with the linear allocator and within a single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    if(strategy != 0 &&
        strategy != VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT &&
        strategy != VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT &&
        strategy != VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: the requested allocation cannot fit into any block of this vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations, trying to succeed without making other
        // allocations lost. Try the last (most recently created) block first.
        if(!m_Blocks.empty())
        {
            VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
            VMA_ASSERT(pCurrBlock);
            VkResult res = AllocateFromBlock(
                pCurrBlock, currentFrameIndex, size, alignment,
                createInfo.flags, createInfo.pUserData, suballocType, strategy,
                pAllocation);
            if(res == VK_SUCCESS)
            {
                VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                return VK_SUCCESS;
            }
        }

        // Forward order - prefer blocks with the smallest amount of free space.
        for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
        {
            VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
            VMA_ASSERT(pCurrBlock);
            VkResult res = AllocateFromBlock(
                pCurrBlock, currentFrameIndex, size, alignment,
                createInfo.flags, createInfo.pUserData, suballocType, strategy,
                pAllocation);
            if(res == VK_SUCCESS)
            {
                VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                return VK_SUCCESS;
            }
        }

        // Backward order - prefer blocks with the largest amount of free space.
        for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
        {
            VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
            VMA_ASSERT(pCurrBlock);
            VkResult res = AllocateFromBlock(
                pCurrBlock, currentFrameIndex, size, alignment,
                createInfo.flags, createInfo.pUserData, suballocType, strategy,
                pAllocation);
            if(res == VK_SUCCESS)
            {
                VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                return VK_SUCCESS;
            }
        }
        // 2. Try to create a new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for the new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock, currentFrameIndex, size, alignment,
                    createInfo.flags, createInfo.pUserData, suballocType, strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from the new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }
    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order - prefer blocks with the smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        // The cost of a request is the total size of allocations that
                        // would have to be made lost; 0 means nothing is sacrificed.
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order - prefer blocks with the largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // Allocate from this block.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
                    (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    UpdateHasEmptyBlock();
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment,
                        size,
                        m_MemoryTypeIndex,
                        suballocType,
                        mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations could not be made lost; retry with a fresh request.
            }
            else
            {
                // Could not find a place for this allocation even with making others lost.
                break;
            }
        }

        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
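// Free() returns the allocation to its block's metadata. At most one empty
// block is kept cached for reuse; an empty block is destroyed immediately when
// another empty block already exists or when the heap is over its budget.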
void VmaBlockVector::Free(VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already have an empty block, or over budget - delete this one.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // else: keep this block as the single cached empty block.
        }
        // pBlock didn't become empty, but there is another empty block - free that one.
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
            }
        }

        UpdateHasEmptyBlock();
        IncrementallySortBlocks();
    }

    // Destruction of the memory block is deferred until here, outside of the
    // mutex lock, for performance reasons.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}
void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}
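// One bubble-sort step per call keeps m_Blocks approximately sorted by the
// amount of free space (ascending), so the forward scan in AllocatePage() hits
// the fullest blocks first without ever paying for a full sort.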
void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex,
        m_FrameInUseCount,
        m_BufferImageGranularity,
        size,
        alignment,
        isUpperAddress,
        suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // Allocate from this block.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
        (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        UpdateHasEmptyBlock();
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment,
            size,
            m_MemoryTypeIndex,
            suballocType,
            mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;
    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created; create the block object for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
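// CPU-side defragmentation: every block touched by a move is mapped, data is
// copied through the host pointers, and for non-coherent memory the source
// range is invalidated before the read and the destination range flushed after
// the write, both aligned to nonCoherentAtomSize.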
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do the actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE THE ACTUAL DATA COPY HAPPENS.
            memmove(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just
    // for defragmentation. Do this regardless of pDefragCtx->res.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
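// GPU-side defragmentation: a temporary VkBuffer spanning the whole block is
// created and bound to each VkDeviceMemory involved in a move, then the copies
// are recorded into the caller's command buffer with vkCmdCopyBuffer. The
// buffers are destroyed later, in DefragmentationEnd().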
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind a buffer for the whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to the command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // The copies are only recorded, not executed yet - report VK_NOT_READY so
    // the caller knows the command buffer must be submitted and completed first.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}
void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                break;
            }
        }
    }
    UpdateHasEmptyBlock();
}
void VmaBlockVector::UpdateHasEmptyBlock()
{
    m_HasEmptyBlock = false;
    for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = true;
            break;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
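// Defragment() decides between the CPU and GPU path: GPU moves require a
// command buffer and a memory type covered by GetGpuDefragmentationMemoryTypeBits(),
// CPU moves require the memory to be HOST_VISIBLE. When both are possible,
// DEVICE_LOCAL memory (or an integrated GPU) prefers the GPU path.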
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: defragment on GPU when the memory is
        // device-local or the GPU is integrated.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            m_Mutex.LockWrite();
            pCtx->mutexLocked = true;
        }

        pCtx->Begin(overlappingMoveSupported);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
            VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
        pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, moves);
            }
        }
    }
}
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats)
{
    // Destroy the temporary buffers created for GPU defragmentation.
    for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
    {
        VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
        if(blockCtx.hBuffer)
        {
            (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
                m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
        }
    }

    if(pCtx->res >= VK_SUCCESS)
    {
        FreeEmptyBlocks(pStats);
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}
bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
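// VmaDefragmentationAlgorithm_Generic: the conservative defragmenter. It sorts
// blocks so that preferred move destinations come first, then repeatedly takes
// the last allocation of the last block and tries to re-place it in an earlier
// block (or earlier in the same block), within the byte/count budgets.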
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create a block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}
VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now, as we are inside VmaBlockVector::m_Mutex, we can make a final check
    // whether this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    // Strategy used when searching for a new place for moved allocations.
    const uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;

    size_t srcBlockMinIndex = 0;

    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find the next allocation to move:
        // 1.1. from last to first block - they are sorted from most "destination" to most "source",
        // 1.2. within a block, from last to first allocation.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find a new place for this allocation in a preceding or the current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                strategy,
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached the limit on the number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move;
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);
                pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);

                allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in m_Allocations for the next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks by the main criterion, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // Execute the defragmentation rounds (the main part).
    const uint32_t roundCount = 2;

    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
    }

    return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
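// VmaDefragmentationAlgorithm_Fast: a single-pass compactor. It assumes it
// knows about every allocation in the vector, walks the suballocations in
// order and slides each one down to the lowest offset that fits - either
// inside the same block (possibly via an overlapping move) or into an earlier
// block - while keeping a small database of free gaps it has skipped over.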
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}
VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}

VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most "destination" (least free space) first.

    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
    });

    // THE MAIN ALGORITHM.

    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;

    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            // Try to place it in one of the free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block.
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.
                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to the next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register the remaining free space at the end of the dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }
                // Same block.
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If destination and source overlap, skip when it would
                        // move the allocation by less than 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;
                        VmaDefragmentationMove move = {
                            srcOrigBlockIndex, dstOrigBlockIndex,
                            srcAllocOffset, dstAllocOffset,
                            srcAllocSize };
                        moves.push_back(move);
                    }
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.
                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, dstOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
            }
        }
    }

    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
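// PreprocessMetadata() strips all FREE suballocations and the by-size index
// from every block's metadata so the main pass can treat the remaining items
// as a packed list; PostprocessMetadata() reinserts the free gaps, restores
// m_SumFreeSize/m_FreeCount and rebuilds m_FreeSuballocationsBySize.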
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - the entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
    // Find the first suballocation at an offset not lower than suballoc.offset
    // and insert before it, keeping the list sorted by offset.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end())
    {
        if(it->offset < suballoc.offset)
        {
            ++it;
        }
        else
        {
            break;
        }
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}

VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}
void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}
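// Begin() picks the algorithm for this block vector: the Fast single-pass
// compactor is only safe when every allocation is known to the defragmenter,
// there is no debug margin, and buffer-image granularity cannot cause
// conflicts; otherwise the Generic algorithm is used.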
void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible())
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}
VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with an algorithm other than the default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentation contexts. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // Dedicated allocations cannot be defragmented; lost allocations cannot either.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with an algorithm other than the default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to a default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}
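// VmaRecorder writes one CSV line per intercepted call:
//   threadId,time,frameIndex,functionName,param1,param2,...
// The file starts with the header "Vulkan Memory Allocator,Calls recording"
// and a format-version line, and is intended for offline replay (VMA ships a
// VmaReplay tool for this). An illustrative line, not from a real capture:
//   12345,0.002,0,vmaCreateAllocator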
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_Freq(INT64_MAX),
    m_StartCounter(INT64_MAX)
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,8");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}

void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.flags,
        createInfo.memoryTypeIndex,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}

void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}

void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}

void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    if(info.poolCount > 0)
    {
        fprintf(m_File, "%p", info.pPools[0]);
        for(uint32_t i = 1; i < info.poolCount; ++i)
        {
            fprintf(m_File, " %p", info.pPools[i]);
        }
    }
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
    Flush();
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}

void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
    Flush();
}

VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            sprintf_s(m_PtrStr, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}

void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}

void VmaRecorder::GetBasicParams(CallParams& outParams)
{
    outParams.threadId = GetCurrentThreadId();

    LARGE_INTEGER counter;
    QueryPerformanceCounter(&counter);
    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
}

void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}

void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
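
// Illustrative sketch (not part of the library): how an application can route its
// calls through VmaRecorder by filling VmaAllocatorCreateInfo::pRecordSettings.
// Guarded with #if 0 so it never takes part in the build; "allocator_record.csv"
// is a hypothetical output path chosen only for this example.
#if 0
static VkResult ExampleCreateRecordingAllocator(
    VkInstance instance, VkPhysicalDevice physicalDevice, VkDevice device,
    VmaAllocator* pOutAllocator)
{
    VmaRecordSettings recordSettings = {};
    // fflush() after every entry - see VmaRecorder::Flush() above.
    recordSettings.flags = VMA_RECORD_FLUSH_AFTER_CALL_BIT;
    recordSettings.pFilePath = "allocator_record.csv";

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.instance = instance;
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.pRecordSettings = &recordSettings; // Requires VMA_RECORDING_ENABLED == 1.

    return vmaCreateAllocator(&allocatorInfo, pOutAllocator);
}
#endif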

////////////////////////////////////////////////////////////////////////////////
// VmaAllocationObjectAllocator

VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

VmaAllocation VmaAllocationObjectAllocator::Allocate()
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc();
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}

VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
    m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    // Vulkan 1.1 made dedicated allocation and bind-memory2 core, so the KHR flags become redundant.
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Needs to be a multiple of uint32_t size because we are going to write VMA_CORRUPTION_DETECTION_MAGIC_VALUE to it.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0,
            SIZE_MAX,
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call m_pBlockVectors[memTypeIndex]->CreateMinBlocks here, because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}

VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
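
// Illustrative sketch (not part of the library): limiting the size of one memory
// heap via VmaAllocatorCreateInfo::pHeapSizeLimit, which the constructor above
// folds into m_HeapSizeLimitMask and m_MemProps. Guarded with #if 0; the
// 256 MiB figure is an arbitrary example value.
#if 0
static VkResult ExampleCreateAllocatorWithHeapLimit(
    VkInstance instance, VkPhysicalDevice physicalDevice, VkDevice device,
    VmaAllocator* pOutAllocator)
{
    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        heapSizeLimit[i] = VK_WHOLE_SIZE; // VK_WHOLE_SIZE means "no limit" for that heap.
    }
    heapSizeLimit[0] = 256ull * 1024 * 1024; // Cap heap 0 at 256 MiB.

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.instance = instance;
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.pHeapSizeLimit = heapSizeLimit;

    return vmaCreateAllocator(&allocatorInfo, pOutAllocator);
}
#endif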

VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}

void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2");
        m_VulkanFunctions.vkBindBufferMemory2KHR =
            (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2");
        m_VulkanFunctions.vkBindImageMemory2KHR =
            (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2");
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
            (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    }
#endif
#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        m_VulkanFunctions.vkBindBufferMemory2KHR =
            (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2KHR");
        m_VulkanFunctions.vkBindImageMemory2KHR =
            (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2KHR");
    }
#endif // #if VMA_BIND_MEMORY2
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget && m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
            (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif // #if VMA_MEMORY_BUDGET
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
        VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif
#if VMA_MEMORY_BUDGET
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif
    }

#undef VMA_COPY_IF_NOT_NULL

    // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
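
// Illustrative sketch (not part of the library): supplying the required entry
// points through VmaVulkanFunctions, as consumed by ImportVulkanFunctions above.
// Useful when VMA_STATIC_VULKAN_FUNCTIONS is 0 (e.g. with a custom loader).
// Guarded with #if 0; only the statically linked prototypes are shown here.
#if 0
static void ExampleFillVulkanFunctions(VmaAllocatorCreateInfo& allocatorInfo, VmaVulkanFunctions& funcs)
{
    funcs.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
    funcs.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;
    funcs.vkAllocateMemory = vkAllocateMemory;
    funcs.vkFreeMemory = vkFreeMemory;
    funcs.vkMapMemory = vkMapMemory;
    funcs.vkUnmapMemory = vkUnmapMemory;
    funcs.vkFlushMappedMemoryRanges = vkFlushMappedMemoryRanges;
    funcs.vkInvalidateMappedMemoryRanges = vkInvalidateMappedMemoryRanges;
    funcs.vkBindBufferMemory = vkBindBufferMemory;
    funcs.vkBindImageMemory = vkBindImageMemory;
    funcs.vkGetBufferMemoryRequirements = vkGetBufferMemoryRequirements;
    funcs.vkGetImageMemoryRequirements = vkGetImageMemoryRequirements;
    funcs.vkCreateBuffer = vkCreateBuffer;
    funcs.vkDestroyBuffer = vkDestroyBuffer;
    funcs.vkCreateImage = vkCreateImage;
    funcs.vkDestroyImage = vkDestroyImage;
    funcs.vkCmdCopyBuffer = vkCmdCopyBuffer;
    // Any member left null must be covered by VMA_STATIC_VULKAN_FUNCTIONS == 1,
    // otherwise the asserts in ImportVulkanFunctions above will fire.
    allocatorInfo.pVulkanFunctions = &funcs;
}
#endif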

VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
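
// Worked example of the heuristic above, assuming hypothetical heap sizes and
// the default macro values:
// - 256 MiB heap (<= VMA_SMALL_HEAP_MAX_SIZE, 1 GiB by default): the preferred
//   block size becomes heapSize / 8 = 32 MiB, rounded up to a multiple of 32 bytes.
// - 8 GiB heap: the heuristic keeps m_PreferredLargeHeapBlockSize, which defaults
//   to VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 MiB) unless overridden via
//   VmaAllocatorCreateInfo::preferredLargeHeapBlockSize.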

VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristics: allocate dedicated memory if requested size is greater than half of preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        else
        {
            return AllocateDedicatedMemory(
                size,
                suballocType,
                memTypeIndex,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
                finalCreateInfo.pUserData,
                dedicatedBuffer,
                dedicatedImage,
                allocationCount,
                pAllocations);
        }
    }
    else
    {
        VkResult res = blockVector->Allocate(
            m_CurrentFrameIndex.load(),
            size,
            alignment,
            finalCreateInfo,
            suballocType,
            allocationCount,
            pAllocations);
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Block-vector allocation failed: try dedicated memory as a fallback.
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        else
        {
            res = AllocateDedicatedMemory(
                size,
                suballocType,
                memTypeIndex,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
                finalCreateInfo.pUserData,
                dedicatedBuffer,
                dedicatedImage,
                allocationCount,
                pAllocations);
            if(res == VK_SUCCESS)
            {
                // Succeeded: AllocateDedicatedMemory already filled pAllocations.
                VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
                return VK_SUCCESS;
            }
            else
            {
                // Everything failed: return error code.
                VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
                return res;
            }
        }
    }
}

VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size,
            suballocType,
            memTypeIndex,
            allocInfo,
            map,
            isUserDataString,
            pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register the new allocations in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();

            // No need to call vkUnmapMemory here - the spec allows vkFreeMemory on mapped memory.

            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            currAlloc->Dtor();
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}

VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate();
    (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}

void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}

void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}

VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfoForPool,
            suballocType,
            allocationCount,
            pAllocations);
    }
    else
    {
        // Bit mask of memory types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer,
                dedicatedImage,
                createInfo,
                memTypeIndex,
                suballocType,
                allocationCount,
                pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed. Try other compatible memory types.
            else
            {
                for(;;)
                {
                    // Remove old memTypeIndex from the list of possibilities.
                    memoryTypeBits &= ~(1u << memTypeIndex);
                    // Find alternative memTypeIndex.
                    res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                    if(res == VK_SUCCESS)
                    {
                        alignmentForMemType = VMA_MAX(
                            vkMemReq.alignment,
                            GetMemoryTypeMinAlignment(memTypeIndex));

                        res = AllocateMemoryOfType(
                            vkMemReq.size,
                            alignmentForMemType,
                            requiresDedicatedAllocation || prefersDedicatedAllocation,
                            dedicatedBuffer,
                            dedicatedImage,
                            createInfo,
                            memTypeIndex,
                            suballocType,
                            allocationCount,
                            pAllocations);
                        // Allocation from this alternative memory type succeeded.
                        if(res == VK_SUCCESS)
                        {
                            return res;
                        }
                        // else: allocation from this memory type failed - try the next one in the next iteration.
                    }
                    // No other matching memory type index could be found.
                    else
                    {
                        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                }
            }
        }
        else
        {
            return res;
        }
    }
}

void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            // Do this regardless of whether the allocation is lost.
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            allocation->Dtor();
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}

VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and so it does nothing. It's left for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}

void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}

void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
        }
    }
}
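
// Illustrative sketch (not part of the library): querying the per-heap budget
// through the public wrapper vmaGetBudget(), which forwards to GetBudget() above
// for every heap. Guarded with #if 0.
#if 0
static void ExampleLogBudget(VmaAllocator allocator)
{
    VmaBudget budget[VK_MAX_MEMORY_HEAPS] = {};
    vmaGetBudget(allocator, budget);

    // With VK_EXT_memory_budget enabled, the numbers come from the driver;
    // otherwise `usage` is an estimate and `budget` is 80% of the heap size.
    printf("heap 0: usage %llu / budget %llu (blockBytes %llu, allocationBytes %llu)\n",
        (unsigned long long)budget[0].usage,
        (unsigned long long)budget[0].budget,
        (unsigned long long)budget[0].blockBytes,
        (unsigned long long)budget[0].allocationBytes);
}
#endif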

static const uint32_t VMA_VENDOR_ID_AMD = 4098;

VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}

VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}

void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        /*
        Warning: This is a carefully designed algorithm.
        Do not modify unless you really know what you're doing :)
        */
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                break;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}

bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}

VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}

void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}
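
// Illustrative sketch (not part of the library): creating and destroying a
// custom pool through the public API, which lands in CreatePool()/DestroyPool()
// above. Guarded with #if 0; memoryTypeIndex would normally come from
// vmaFindMemoryTypeIndex() rather than being hard-coded, and the block counts
// here are arbitrary example values.
#if 0
static VkResult ExampleUseCustomPool(VmaAllocator allocator, uint32_t memoryTypeIndex)
{
    VmaPoolCreateInfo poolInfo = {};
    poolInfo.memoryTypeIndex = memoryTypeIndex;
    poolInfo.blockSize = 64ull * 1024 * 1024; // Fixed 64 MiB blocks (0 would mean "use default").
    poolInfo.minBlockCount = 1;               // Keep at least one block alive.
    poolInfo.maxBlockCount = 4;               // Never grow beyond 4 blocks.

    VmaPool pool = VK_NULL_HANDLE;
    VkResult res = vmaCreatePool(allocator, &poolInfo, &pool);
    if(res == VK_SUCCESS)
    {
        // ... allocate with VmaAllocationCreateInfo::pool = pool ...
        vmaDestroyPool(allocator, pool);
    }
    return res;
}
#endif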

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}

void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}

void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}

VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}

void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate();
    (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}

VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}

void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
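
// Illustrative sketch (not part of the library): the m_DeviceMemoryCallbacks
// hooks invoked by AllocateVulkanMemory()/FreeVulkanMemory() above can be used
// to log every vkAllocateMemory/vkFreeMemory the allocator performs. Guarded
// with #if 0; the Example* names are hypothetical application functions.
#if 0
static void VKAPI_PTR ExampleOnAllocate(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
{
    printf("vkAllocateMemory: type %u, size %llu\n", memoryType, (unsigned long long)size);
}
static void VKAPI_PTR ExampleOnFree(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
{
    printf("vkFreeMemory: type %u, size %llu\n", memoryType, (unsigned long long)size);
}
static void ExampleSetDeviceMemoryCallbacks(VmaAllocatorCreateInfo& allocatorInfo, VmaDeviceMemoryCallbacks& callbacks)
{
    callbacks.pfnAllocate = ExampleOnAllocate;
    callbacks.pfnFree = ExampleOnFree;
    allocatorInfo.pDeviceMemoryCallbacks = &callbacks;
}
#endif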

VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}

VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}

VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
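
// Illustrative sketch (not part of the library): the public vmaMapMemory()/
// vmaUnmapMemory() pair ends up in Map()/Unmap() above. For a block allocation
// the whole VkDeviceMemory is mapped and the returned pointer is already offset
// to this allocation, so neighboring allocations must not be written through it.
// Guarded with #if 0; ExampleUploadBytes is a hypothetical helper.
#if 0
static VkResult ExampleUploadBytes(VmaAllocator allocator, VmaAllocation allocation, const void* pSrc, size_t byteCount)
{
    void* pData = VMA_NULL;
    VkResult res = vmaMapMemory(allocator, allocation, &pData);
    if(res == VK_SUCCESS)
    {
        memcpy(pData, pSrc, byteCount);
        vmaUnmapMemory(allocator, allocation);
    }
    return res;
}
#endif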

VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
    return res;
}

void VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize allocationSize = hAllocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;

        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
        memRange.memory = hAllocation->GetMemory();

        switch(hAllocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                memRange.size = allocationSize - memRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
                    allocationSize - memRange.offset);
            }
            break;

        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            // 1. Still within this allocation.
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                size = allocationSize - offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
            }
            memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);

            // 2. Adjust to whole block.
            const VkDeviceSize allocationOffset = hAllocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
            memRange.offset += allocationOffset;
            memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);

            break;
        }

        default:
            VMA_ASSERT(0);
        }

        switch(op)
        {
        case VMA_CACHE_FLUSH:
            (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
}
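
// Illustrative sketch (not part of the library): after writing through a mapped
// pointer into a non-coherent memory type, the range must be flushed.
// vmaFlushAllocation() forwards to FlushOrInvalidateAllocation() above, which
// expands the range to nonCoherentAtomSize as the Vulkan spec requires.
// Guarded with #if 0.
#if 0
static void ExampleFlushWholeAllocation(VmaAllocator allocator, VmaAllocation allocation)
{
    // Offset 0 with VK_WHOLE_SIZE flushes the entire allocation; the rounding
    // to nonCoherentAtomSize happens inside the library.
    vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);
}
#endif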

void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    // No need to call vkUnmapMemory before vkFreeMemory - the spec allows skipping it.

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}

uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create buffer.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // Query for supported memory types.
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // Destroy buffer.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}

#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    memProps.pNext = &budgetProps;

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET

void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}

uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}

#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
//////////////////////////////////////////////////////////////////////////////////////////
// Public interface

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(const VmaAllocatorCreateInfo* pCreateInfo, VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(VmaAllocator allocator, VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(VmaAllocator allocator, VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}

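/*
Illustrative usage sketch (not part of the library): query the current budget
for every memory heap of an initialized VmaAllocator `allocator`:

    VmaBudget budget[VK_MAX_MEMORY_HEAPS] = {};
    vmaGetBudget(allocator, budget);
    // budget[heapIndex].usage and budget[heapIndex].budget now estimate the
    // heap's current consumption and the maximum this process can allocate.
*/
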
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(VmaAllocator allocator, char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

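/*
Illustrative usage sketch (not part of the library, requires
VMA_STATS_STRING_ENABLED): dump the allocator state as JSON, then release the
string:

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE: include detailed map
    printf("%s\n", statsString);
    vmaFreeStatsString(allocator, statsString);

The string must be freed with vmaFreeStatsString because it is allocated
through the allocator's own CPU allocation callbacks.
*/
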
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains all required flags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Cost = preferred flags missing from this type + not-preferred flags present in it.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}

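/*
Illustrative usage sketch (not part of the library): find a host-visible,
host-coherent memory type suitable for a staging buffer, allowing any of the
device's memory types (memoryTypeBits = UINT32_MAX):

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndex(allocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
    // On VK_SUCCESS, memTypeIndex is the lowest-cost type that has all
    // requiredFlags and the most preferredFlags bits set.
*/
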
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    // Create a temporary buffer only to learn its memory requirements.
    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    // Create a temporary image only to learn its memory requirements.
    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);
    VMA_DEBUG_LOG("vmaCreatePool");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    VkResult res = allocator->CreatePool(pCreateInfo, pPool);
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif
    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator);
    if(pool == VK_NULL_HANDLE)
    {
        return;
    }
    VMA_DEBUG_LOG("vmaDestroyPool");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif
    allocator->DestroyPool(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetPoolStats(pool, pPoolStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif
    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    VMA_DEBUG_LOG("vmaCheckPoolCorruption");
    return allocator->CheckPoolCorruption(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(VmaAllocator allocator, VmaPool pool, const char** ppName)
{
    VMA_ASSERT(allocator && pool);
    VMA_DEBUG_LOG("vmaGetPoolName");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    *ppName = pool->GetName();
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(VmaAllocator allocator, VmaPool pool, const char* pName)
{
    VMA_ASSERT(allocator && pool);
    VMA_DEBUG_LOG("vmaSetPoolName");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    pool->SetName(pName);
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}

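/*
Illustrative usage sketch (not part of the library): create a custom pool for
uniform buffers, give it a debug name, and destroy it when done. The buffer
parameters are example values chosen for the sketch:

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024; // arbitrary representative size
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(allocator,
        &sampleBufCreateInfo, &sampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 16ull * 1024 * 1024; // 16 MiB blocks

    VmaPool pool = VK_NULL_HANDLE;
    vmaCreatePool(allocator, &poolCreateInfo, &pool);
    vmaSetPoolName(allocator, pool, "UniformBufferPool");
    // ... allocate with VmaAllocationCreateInfo::pool = pool ...
    vmaDestroyPool(allocator, pool);
*/
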
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
    VMA_DEBUG_LOG("vmaAllocateMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }
    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
    VMA_DEBUG_LOG("vmaAllocateMemoryPages");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }
    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }
    return result;
}

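/*
Illustrative usage sketch (not part of the library): allocate and bind memory
for a VkBuffer created directly through Vulkan. `buf` is assumed to be a valid
buffer created with vkCreateBuffer:

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VkResult res = vmaAllocateMemoryForBuffer(allocator, buf, &allocCreateInfo, &alloc, VMA_NULL);
    if(res == VK_SUCCESS)
    {
        res = vmaBindBufferMemory(allocator, alloc, buf);
    }

For the common case, vmaCreateBuffer (defined further below) performs buffer
creation, allocation, and binding in one call.
*/
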
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }
    return result;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(VmaAllocator allocator, const VmaAllocation allocation)
{
    VMA_ASSERT(allocator);
    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }
    VMA_DEBUG_LOG("vmaFreeMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif
    allocator->FreeMemory(1, &allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }
    VMA_ASSERT(allocator);
    VMA_DEBUG_LOG("vmaFreeMemoryPages");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif
    allocator->FreeMemory(allocationCount, pAllocations);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_LOG("vmaResizeAllocation");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    return allocator->ResizeAllocation(allocation, newSize);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif
    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif
    return allocator->TouchAllocation(allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void* pUserData)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocation->SetUserData(allocator, pUserData);
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK;
    allocator->CreateLostAllocation(pAllocation);
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    VkResult res = allocator->Map(allocation, ppData);
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif
    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif
    allocator->Unmap(allocation);
}

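/*
Illustrative usage sketch (not part of the library): write data to a mapped,
host-visible allocation. The flush is a no-op on HOST_COHERENT memory but
required for correctness on non-coherent types. `srcData` and `srcSize` are
the caller's data:

    void* mapped = VMA_NULL;
    if(vmaMapMemory(allocator, allocation, &mapped) == VK_SUCCESS)
    {
        memcpy(mapped, srcData, (size_t)srcSize);
        vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);
        vmaUnmapMemory(allocator, allocation);
    }
*/
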
VMA_CALL_PRE void VMA_CALL_POST vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_LOG("vmaFlushAllocation");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_LOG("vmaInvalidateAllocation");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);
    VMA_DEBUG_LOG("vmaCheckCorruption");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    return allocator->CheckCorruption(memoryTypeBits);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated entry point, kept for backward compatibility:
    // implemented on top of vmaDefragmentationBegin/vmaDefragmentationEnd.
    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        // Begin finished all CPU-side moves synchronously; close the context.
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);
    VMA_DEBUG_LOG("vmaDefragmentationEnd");
    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif
        return allocator->DefragmentationEnd(context);
    }
    return VK_SUCCESS;
}

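/*
Illustrative usage sketch (not part of the library): CPU-side defragmentation
of an existing set of allocations. `allocs` and `allocCount` are the caller's
array of VmaAllocation handles:

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = (uint32_t)allocCount;
    defragInfo.pAllocations = allocs;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
    VmaDefragmentationStats defragStats = {};
    vmaDefragmentationBegin(allocator, &defragInfo, &defragStats, &defragCtx);
    vmaDefragmentationEnd(allocator, defragCtx);

Buffers and images bound to moved allocations must be recreated and rebound by
the caller afterwards.
*/
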
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);
    VMA_DEBUG_LOG("vmaBindBufferMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);
    VMA_DEBUG_LOG("vmaBindBufferMemory2");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);
    VMA_DEBUG_LOG("vmaBindImageMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);
    VMA_DEBUG_LOG("vmaBindImageMemory2");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in the alignment reported
        // by memory requirements.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(1, pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}

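/*
Illustrative usage sketch (not part of the library): the typical way to create
a device-local vertex buffer together with its memory in one call. Sizes and
usage flags are example values:

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
        &buffer, &allocation, VMA_NULL);
    // ...
    vmaDestroyBuffer(allocator, buffer, allocation); // destroys both buffer and memory
*/
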
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
{
    VMA_ASSERT(allocator);
    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }
    VMA_DEBUG_LOG("vmaDestroyBuffer");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif
    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(1, &allocation);
    }
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(1, pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}

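/*
Illustrative usage sketch (not part of the library): create a 2D sampled image
with device-local memory. Parameter values are examples:

    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage image = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo,
        &image, &allocation, VMA_NULL);
    // ...
    vmaDestroyImage(allocator, image, allocation);
*/
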
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
{
    VMA_ASSERT(allocator);
    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }
    VMA_DEBUG_LOG("vmaDestroyImage");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif
    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(1, &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION