#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif
// Define these macros to decide whether the library uses the given extensions,
// based on whether the Vulkan headers expose them.
#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && VK_KHR_get_physical_device_properties2
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif

// Decorations applied before and after every public function declaration.
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif
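/*
Illustrative sketch (not required by the library): since VMA_CALL_PRE/VMA_CALL_POST
wrap every public function declaration, they can be predefined before including this
header to decorate the API, e.g. to export it from a Windows DLL:

    #define VMA_CALL_PRE  __declspec(dllexport)
    #define VMA_CALL_POST __cdecl
*/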
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);

#if VMA_DEDICATED_ALLOCATION
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2
    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);

// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);
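/*
Usage sketch (illustrative only), assuming a valid VmaAllocator named `allocator`
already exists: query which memory type a uniform buffer would use, e.g. before
creating a custom pool for such buffers.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 1024;
    bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    uint32_t memTypeIndex;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
        allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);
*/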
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
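/*
Usage sketch (illustrative only), mirroring the pattern from the library
documentation; assumes a valid VmaAllocator named `allocator`:

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buf;
    VmaAllocation alloc;
    vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, VMA_NULL);
    // ... use the buffer ...
    vmaDestroyBuffer(allocator, buf, alloc);
*/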
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdlib>
#include <cstring>

/*******************************************************************************
CONFIGURATION SECTION

Define some of these macros before each #include of this header or change them
here if you need other than default behavior depending on your environment.
*/

// Define this macro to 1 to make the library fetch pointers to Vulkan functions
// internally, like: vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif
// Define this macro to 1 to make the library use STL containers instead of its own implementation.
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus
    // Otherwise it is always 199711L, despite shared_mutex being supported since Visual Studio 2015 Update 2.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif
#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <cassert> // for assert
#include <algorithm> // for min, max
#include <mutex>

#ifndef VMA_NULL
    // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    #define VMA_NULL nullptr
#endif
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef _DEBUG
        #define VMA_ASSERT(expr)         assert(expr)
    #else
        #define VMA_ASSERT(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef _DEBUG
        #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)
    #endif
#endif
#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type)       (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr)   _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr)   free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif
// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif

#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif
// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        // Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation will have its own memory block. Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes.
    // Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes.
    // Set nonzero for debugging purposes only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define this macro to 1 to automatically fill new allocations and destroyed
    // allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to
    // enable writing magic value to the margin before and after every allocation and
    // validating it, so that memory corruptions (out-of-bounds writes) are detected.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Set this to 1 for debugging purposes only, to enable single mutex protecting all
    // entry calls to the library. Can be useful for debugging multithreading issues.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
    // Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif
#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns given value up to nearest multiple of align value.
// For example: VmaAlignUp(11, 8) = 16. Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Aligns given value down to nearest multiple of align value.
// For example: VmaAlignDown(11, 8) = 8. Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}

// Returns true if given number is a power of two.
// T must be an unsigned integer, or a signed integer that is always nonnegative.
// For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
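// Worked examples (illustrative only):
//   VmaNextPow2(17u) == 32, VmaNextPow2(32u) == 32,
//   VmaPrevPow2(17u) == 16, VmaPrevPow2(32u) == 32.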
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}

#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
/*
Returns true if two memory blocks occupy very same VkDeviceMemory "page".
Assumed that resourceA comes before resourceB and they don't overlap.
pageSize must be a power of two.
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
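/*
Worked example (illustrative only): with pageSize = 65536, a resource at offset 0
with size 100 ends on page 0, while a resource at offset 65536 starts on page 1, so
VmaBlocksOnSamePage(0, 100, 65536, 65536) returns false and no granularity conflict
between the two is possible.
*/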
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
or linear image and another one is optimal image. If type is unknown, behave
conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
// Writes magic values into the debug margin starting at given offset.
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#else
    // no-op
#endif
}

// Returns false if the debug margin at given offset was overwritten (corruption detected).
static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
// Fills structure with parameters of an example buffer to be used for transfers
// during GPU memory defragmentation.
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; // Example size.
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
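/*
Usage sketch (illustrative only): the lock guards above follow the standard RAII
pattern, e.g.:

    VMA_RW_MUTEX blocksMutex;
    {
        VmaMutexLockRead lock(blocksMutex, true); // Calls LockRead().
        // ... read shared state ...
    } // Destructor calls UnlockRead() at end of scope.
*/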
#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection, or place where
new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}

template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
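/*
Usage sketch (illustrative only), assuming a sorted array of VkDeviceSize:

    VkDeviceSize sizes[] = { 16, 32, 64, 256 };
    struct Less { bool operator()(VkDeviceSize a, VkDeviceSize b) const { return a < b; } };
    // Points at 64: the first element not less than 50.
    const VkDeviceSize* it = VmaBinaryFindFirstNotLess(sizes, sizes + 4, (VkDeviceSize)50, Less());
*/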
/*
Returns true if all pointers in the array are not-null and unique.
Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
T must be pointer type, e.g. VmaAllocation, VmaPool.
*/
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
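/*
Usage sketch (illustrative only): vma_new/vma_delete pair placement-new over the
VkAllocationCallbacks-aware VmaMalloc/VmaFree; `pAllocationCallbacks` below is an
assumed valid const VkAllocationCallbacks* (or VMA_NULL for the system allocator).

    VmaMutex* const m = vma_new(pAllocationCallbacks, VmaMutex)();
    m->Lock();
    m->Unlock();
    vma_delete(pAllocationCallbacks, m);
*/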
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}

static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR
// Class with interface compatible with subset of std::vector.
// T must be POD because constructors and destructors are not called and memcpy is
// used for these objects.
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // value is unused; this overload exists for interface compatibility with std::vector.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
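/*
Usage sketch (illustrative only), assuming callbacks may be VMA_NULL so the system
allocator is used:

    const VkAllocationCallbacks* callbacks = VMA_NULL;
    VmaStlAllocator<uint32_t> alloc(callbacks);
    VmaVector< uint32_t, VmaStlAllocator<uint32_t> > v(alloc);
    v.push_back(7);
    VMA_ASSERT(v.size() == 1 && v[0] == 7);
*/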
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
////////////////////////////////////////////////////////////////////////////////
// class VmaPoolAllocator

/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            new(result)T(); // Explicit constructor call.
            return result;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    new(result)T(); // Explicit constructor call.
    return result;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            ptr->~T(); // Explicit destructor call.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
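/*
Usage sketch (illustrative only): items are served from a free list threaded through
growing blocks; Alloc() placement-constructs T in a slot and Free() destroys it and
returns the slot to the owning block's free list.

    VmaPoolAllocator<uint32_t> pool(VMA_NULL, 32); // First block holds 32 items.
    uint32_t* a = pool.Alloc();
    *a = 123;
    pool.Free(a);
*/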
////////////////////////////////////////////////////////////////////////////////
// class VmaRawList, VmaList

#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST
template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
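/*
Usage sketch (illustrative only) for the custom list implementation above; with
VMA_USE_STL_LIST enabled the same calls go through std::list instead:

    VmaStlAllocator<int> alloc(VMA_NULL);
    VmaList< int, VmaStlAllocator<int> > list(alloc);
    list.push_back(1);
    list.push_back(2);
    for(VmaList< int, VmaStlAllocator<int> >::iterator it = list.begin(); it != list.end(); ++it)
    {
        // *it visits 1, then 2.
    }
*/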
#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
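/*
Usage sketch (illustrative only) for the sorted-vector based VmaMap: insert keeps
the vector ordered by key, and find does a binary search.

    VmaStlAllocator< VmaPair<uint32_t, uint32_t> > alloc(VMA_NULL);
    VmaMap<uint32_t, uint32_t> map(alloc);
    map.insert(VmaPair<uint32_t, uint32_t>(1u, 100u));
    VmaPair<uint32_t, uint32_t>* it = map.find(1u);
    // it->second == 100 here; map.find(2u) == map.end().
*/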
class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    /*
    This struct is allocated using VmaPoolAllocator.
    */

    void Ctor(uint32_t currentFrameIndex, bool userDataString)
    {
        m_Alignment = 1;
        m_Size = 0;
        m_MemoryTypeIndex = 0;
        m_pUserData = VMA_NULL;
        m_LastUseFrameIndex = currentFrameIndex;
        m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
        m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
        m_MapCount = 0;
        m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;

#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    void Dtor()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif

    void FreeUserDataString(VmaAllocator hAllocator);
};
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as
allocated memory block or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparators for offsets, for sorting suballocations.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

/*
Parameters of planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    // Shouldn't modify blockCount.
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags.
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request must already be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};
#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    ////////////////////////////////////////////////////////////////////////////////
    // For defragmentation

    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
/*
Block metadata implementing a linear allocator: allocations are placed one after
another, with support for a ring-buffer mode and a double-stack mode (a second
vector of allocations growing from the top of the block downwards).
*/
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    /*
    There are two suballocation vectors, used in ping-pong way.
    The one with index m_1stVectorIndex is called 1st.
    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
    2nd can be non-empty only when 1st is not empty.
    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
    */
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        /*
        Suballocations in 2nd vector are created later than the ones in 1st, but they
        all have smaller offset.
        */
        SECOND_VECTOR_RING_BUFFER,
        /*
        Suballocations in 2nd vector are upper side of double stack.
        They all have offsets higher than those in 1st vector.
        Top of this stack means smaller offsets, but higher indices in this vector.
        */
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
/*
Buddy allocator:

- GetSize() is the original size of allocated memory block.
- m_UsableSize is this size aligned down to a power of two.
  All allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them.
  It is reported as separate, unused range, not available for allocations.

Node at level 0 has size = m_UsableSize.
Each next level contains nodes with size 2 times smaller than current level.
m_LevelCount is the maximum number of levels to use in the current object.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    struct {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // Alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    // node->type must be FREE; node->free.prev, next can be undefined.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    // node->type must be FREE; node->free.prev, next stay untouched.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
/*
Represents a single block of device memory (`VkDeviceMemory`) with all the
data about its regions (aka suballocations, #VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    VkResult CheckCorruption(VmaAllocator hAllocator);

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it is not used by multiple threads simultaneously,
    e.g. vkMapMemory, vkBindBufferMemory.
    Also protects m_MapCount, m_pMappedData.
    Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
};

class VmaDefragmentationAlgorithm;
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)

        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsEmpty() const { return m_Blocks.empty(); }
    bool IsCorruptionDetectionEnabled() const;

        uint32_t currentFrameIndex,
        VkDeviceSize alignment,
        VmaSuballocationType suballocType,
        size_t allocationCount,

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

        class VmaBlockVectorDefragmentationContext* pCtx,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

    friend class VmaDefragmentationAlgorithm_Generic;

    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    bool m_HasEmptyBlock;
    VMA_RW_MUTEX m_Mutex;
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    void Remove(VmaDeviceMemoryBlock* pBlock);

    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize alignment,
        VmaSuballocationType suballocType,

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize alignment,
        VmaSuballocationType suballocType,

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);
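/*
m_Blocks is kept only loosely sorted: IncrementallySortBlocks() performs a
single bubble-sort step per call, which amortizes the sorting cost across
allocations while keeping blocks with the least free space near the front, so
new allocations fill existing blocks before emptier ones. A minimal sketch of
that idea (illustrative, assuming a sumFreeSize accessor):

\code
#include <utility>
#include <vector>

struct BlockSketch { uint64_t sumFreeSize; };

// One bubble-sort step: swap the first adjacent pair that is out of order.
void IncrementallySortSketch(std::vector<BlockSketch*>& blocks)
{
    for(size_t i = 1; i < blocks.size(); ++i)
    {
        if(blocks[i - 1]->sumFreeSize > blocks[i]->sumFreeSize)
        {
            std::swap(blocks[i - 1], blocks[i]);
            return; // At most one swap per call.
        }
    }
}
\endcode
*/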
    VMA_CLASS_NO_COPY(VmaPool_T)

    VmaBlockVector m_BlockVector;

        VkDeviceSize preferredBlockSize);

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)

    VmaDefragmentationAlgorithm(
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
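/*
Defragmentation is hidden behind this abstract interface so the block vector
can pick a strategy at runtime (the Generic and Fast implementations below).
A minimal sketch of driving it through the base class, using only the virtual
methods declared above (the driver function itself is illustrative):

\code
// Hypothetical driver: consider every allocation, then compute moves.
VkResult RunDefragSketch(VmaDefragmentationAlgorithm& algo,
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytes, uint32_t maxAllocations)
{
    algo.AddAll();
    return algo.Defragment(moves, maxBytes, maxAllocations); // fills `moves`
}
\endcode
*/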
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)

    VmaDefragmentationAlgorithm_Generic(
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };
    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
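/*
MoveMakesSense() above guards against moves that would not compact memory: a
move is useful only if it takes the allocation strictly toward the front,
i.e. to a lower block index, or to a lower offset within the same block.
A sketch of that predicate (illustrative rewrite of the same logic):

\code
#include <cstdint>

bool MoveMakesSenseSketch(size_t dstBlockIndex, uint64_t dstOffset,
                          size_t srcBlockIndex, uint64_t srcOffset)
{
    if(dstBlockIndex != srcBlockIndex)
        return dstBlockIndex < srcBlockIndex; // earlier block wins
    return dstOffset < srcOffset;             // same block: move left only
}
\endcode
*/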
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)

    VmaDefragmentationAlgorithm_Fast(
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                // Leave this structure behind to be used for next smaller allocations.
                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                // This structure becomes invalid.
                else
                {
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }
                return true;
            }
            return false;
        }

        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
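/*
FreeSpaceDatabase is a tiny fixed-size cache (MAX_COUNT = 4) of the largest
free ranges discovered while sweeping blocks, so the fast defragmenter can
relocate small allocations into holes without a full search. A usage sketch
(values illustrative):

\code
FreeSpaceDatabase db;
db.Register(0, 1024, 4096); // block 0: free range [1024, 5120)

size_t dstBlock;
VkDeviceSize dstOffset;
if(db.Fetch(256, 512, dstBlock, dstOffset)) // alignment 256, size 512
{
    // dstBlock == 0, dstOffset == 1024; the bytes left after the fetched
    // range stay registered for subsequent, smaller allocations.
}
\endcode
*/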
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)

    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;

    VmaBlockVectorDefragmentationContext(
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

    void Begin(bool overlappingMoveSupported);

    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;

    VmaDefragmentationAlgorithm* m_pAlgorithm;

    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};

struct VmaDefragmentationContext_T
{
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)

    VmaDefragmentationContext_T(
        uint32_t currFrameIndex,
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        VkBool32* pAllocationsChanged);

    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,

    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;

    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
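/*
VmaDefragmentationContext_T is what backs the public defragmentation API: the
caller fills a VmaDefragmentationInfo2, receives a VmaDefragmentationContext,
and the CPU/GPU byte and allocation limits are routed to Defragment() above.
A usage sketch of the VMA 2.x API; `allocations` and `allocCount` are assumed
caller-side data:

\code
VmaDefragmentationInfo2 defragInfo = {};
defragInfo.allocationCount = allocCount;
defragInfo.pAllocations = allocations;
defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

VmaDefragmentationContext defragCtx;
vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
// ... recreate buffers/images for allocations that were moved ...
vmaDefragmentationEnd(allocator, defragCtx);
\endcode
*/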
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled);

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        uint64_t allocationCount,
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
    void RecordFreeMemory(uint32_t frameIndex,
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
    void RecordSetAllocationUserData(uint32_t frameIndex,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
    void RecordMapMemory(uint32_t frameIndex,
    void RecordUnmapMemory(uint32_t frameIndex,
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
    void RecordDestroyBuffer(uint32_t frameIndex,
    void RecordDestroyImage(uint32_t frameIndex,
    void RecordTouchAllocation(uint32_t frameIndex,
    void RecordGetAllocationInfo(uint32_t frameIndex,
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
    void RecordDefragmentationBegin(uint32_t frameIndex,
    void RecordDefragmentationEnd(uint32_t frameIndex,
    void RecordSetPoolName(uint32_t frameIndex,

    class UserDataString
    {
        const char* GetString() const { return m_Str; }
    };

    VMA_MUTEX m_FileMutex;
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);

    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }
    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);

#endif // #if VMA_RECORDING_ENABLED
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)

    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};

struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
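/*
VmaCurrentBudgetData tracks bytes per heap with atomics so the hot allocation
path stays lock-free; m_OperationsSinceBudgetFetch counts mutations so the
allocator knows when a cached VK_EXT_memory_budget snapshot has gone stale.
A sketch of such a staleness check (the threshold value is illustrative, not
necessarily what the library uses):

\code
#include <atomic>
#include <cstdint>

constexpr uint32_t kBudgetRefetchThreshold = 30; // hypothetical

bool ShouldRefetchBudgetSketch(std::atomic<uint32_t>& opsSinceFetch)
{
    // Refetch once enough allocations/frees happened since the last snapshot.
    return opsSinceFetch.load(std::memory_order_relaxed) >= kBudgetRefetchThreshold;
}
\endcode
*/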
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)

    bool m_UseKhrDedicatedAllocation;
    bool m_UseKhrBindMemory2;
    bool m_UseExtMemoryBudget;
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        VmaSuballocationType suballocType,
        size_t allocationCount,

        size_t allocationCount,

    VkResult ResizeAllocation(
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
    VkResult DefragmentationEnd(

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,

    VkResult BindBufferMemory(
        VkDeviceSize allocationLocalOffset,
    VkResult BindImageMemory(
        VkDeviceSize allocationLocalOffset,

    void FlushOrInvalidateAllocation(
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    uint32_t GetGpuDefragmentationMemoryTypeBits();

    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;

    VMA_RW_MUTEX m_PoolsMutex;
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,

    VkResult AllocateDedicatedMemoryPage(
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool isUserDataString,

    VkResult AllocateDedicatedMemory(
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool isUserDataString,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        size_t allocationCount,

    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
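/*
GetMemoryTypeMinAlignment() above is why persistently mapped, non-coherent
memory must be flushed on nonCoherentAtomSize boundaries: for HOST_VISIBLE but
not HOST_COHERENT types the minimum alignment is raised from the debug default
to the device's atom size. A worked sketch of the same computation (values in
the comment are illustrative):

\code
#include <cstdint>

uint64_t MinAlignmentSketch(bool nonCoherent, uint64_t debugAlignment, uint64_t nonCoherentAtomSize)
{
    // e.g. debugAlignment = 1, nonCoherentAtomSize = 64:
    // coherent type -> 1, non-coherent type -> 64.
    return nonCoherent
        ? (debugAlignment > nonCoherentAtomSize ? debugAlignment : nonCoherentAtomSize)
        : debugAlignment;
}
\endcode
*/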
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    ptr->~T();
    VmaFree(hAllocator, ptr);
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
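/*
vma_delete / vma_delete_array manually run destructors and then return memory
through the user-supplied VkAllocationCallbacks, mirroring construction done
with placement-new. A sketch of the allocating side this pairs with (the
helper name is hypothetical; it assumes the VmaAllocate template above):

\code
#include <new>      // placement new
#include <utility>  // std::forward

// Hypothetical counterpart: construct a T in memory obtained from the
// allocator's callbacks (pairs with vma_delete above).
template<typename T, typename... Args>
static T* vma_new_sketch(VmaAllocator hAllocator, Args&&... args)
{
    return new(VmaAllocate<T>(hAllocator)) T(std::forward<Args>(args)...);
}
\endcode
*/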
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char* p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char* p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)

    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);

    void BeginObject(bool singleLine = false);
    void BeginArray(bool singleLine = false);
    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);

    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";
VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        // Escape characters that are special in JSON strings.
        char ch = pStr[i];
        if(ch == '"' || ch == '\\')
        {
            m_SB.Add('\\');
            m_SB.Add(ch);
        }
        else if((uint8_t)ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add('\\'); m_SB.Add('b'); break;
        case '\f': m_SB.Add('\\'); m_SB.Add('f'); break;
        case '\n': m_SB.Add('\\'); m_SB.Add('n'); break;
        case '\r': m_SB.Add('\\'); m_SB.Add('r'); break;
        case '\t': m_SB.Add('\\'); m_SB.Add('t'); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
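/*
VmaJsonWriter enforces well-formed output with the m_Stack / m_InsideString
asserts above: inside an object, values at even positions must be strings
(the keys). A usage sketch, assuming an initialized VmaAllocator hAllocator;
it produces roughly {"Name": "MyPool", "Size": 1024}:

\code
VmaStringBuilder sb(hAllocator);
{
    VmaJsonWriter json(hAllocator->GetAllocationCallbacks(), sb);
    json.BeginObject(true); // single-line object
    json.WriteString("Name");
    json.WriteString("MyPool");
    json.WriteString("Size");
    json.WriteNumber(1024u);
    json.EndObject();
}
// sb.GetData() / sb.GetLength() now hold the JSON text.
\endcode
*/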
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
        {
            ++mapRefCount;
        }
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        return VMA_NULL;
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
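/*
MakeLost() above is lock-free: it snapshots the atomic last-use frame index
and retries the compare-exchange until it either proves the allocation is
still in use or successfully marks it VMA_FRAME_INDEX_LOST. The same pattern
in miniature (illustrative; the sentinel value is a stand-in):

\code
#include <atomic>
#include <cstdint>

constexpr uint32_t kLost = UINT32_MAX; // stand-in for VMA_FRAME_INDEX_LOST

bool TryMarkLostSketch(std::atomic<uint32_t>& lastUseFrame, uint32_t currentFrame, uint32_t framesInUse)
{
    uint32_t observed = lastUseFrame.load();
    while(observed != kLost && observed + framesInUse < currentFrame)
    {
        if(lastUseFrame.compare_exchange_weak(observed, kLost))
        {
            return true; // we won the race; allocation is now lost
        }
        // `observed` was refreshed by compare_exchange_weak; re-check.
    }
    return false;
}
\endcode
*/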
#if VMA_STATS_STRING_ENABLED

// Correspondence of these strings with enum VmaSuballocationType is important.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}
void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
    VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
        hAllocator->m_hDevice,
        m_DedicatedAllocation.m_hMemory,
        0, VK_WHOLE_SIZE, 0,
        ppData);
    if(result == VK_SUCCESS)
    {
        m_DedicatedAllocation.m_pMappedData = *ppData;
        m_MapCount = 1;
    }
    return result;
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}
bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in m_FreeSuballocationsBySize.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Margin required between allocations - every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // Margin required between allocations - previous allocation must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't match expected one.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}

VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    return 0;
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}
void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request: Early return.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index], false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    it, false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search starting from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index], false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm searching for the best candidate to make lost.
        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    suballocIt, true, // canMakeOtherLost
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }
        return found;
    }

    return false;
}
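/*
The best-fit path above relies on m_FreeSuballocationsBySize being sorted by
size ascending: VmaBinaryFindFirstNotLess is a lower_bound-style search that
jumps straight to the smallest free range still big enough. The equivalent
idea with the standard library (an illustrative sketch, not the library's
helper):

\code
#include <algorithm>
#include <cstdint>
#include <vector>

size_t FindBestFitSketch(const std::vector<uint64_t>& freeSizesAscending, uint64_t neededSize)
{
    // First element >= neededSize, i.e. the tightest fit.
    auto it = std::lower_bound(freeSizesAscending.begin(), freeSizesAscending.end(), neededSize);
    return (it != freeSizesAscending.end())
        ? static_cast<size_t>(it - freeSizesAscending.begin())
        : SIZE_MAX; // no free range large enough
}
\endcode
*/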
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}

uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}

VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }
    return VK_SUCCESS;
}
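/*
CheckCorruption() above expects a magic value written into the
VMA_DEBUG_MARGIN bytes on both sides of every allocation; a mismatch means
user code wrote out of bounds. A sketch of the write/validate pair (the
pattern value and helper names are illustrative):

\code
#include <cstdint>
#include <cstring>

constexpr uint32_t kMagic = 0x7F84E666; // hypothetical 4-byte pattern

void WriteMagicSketch(void* blockData, uint64_t offset)
{
    uint32_t* p = reinterpret_cast<uint32_t*>(static_cast<char*>(blockData) + offset);
    *p = kMagic;
}

bool ValidateMagicSketch(const void* blockData, uint64_t offset)
{
    uint32_t v;
    memcpy(&v, static_cast<const char*>(blockData) + offset, sizeof(v));
    return v == kMagic;
}
\endcode
*/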
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert new free suballocation after current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // If final offset moved past this suballocation, caller should retry with the next one.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached, updating itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // A conflicting allocation must also become lost, or the request fails.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margins is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. *pOffset is already filled.
    return true;
}
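/*
The granularity checks above call VmaBlocksOnSamePage(): two resources can
only conflict if they would share a bufferImageGranularity-sized "page" and
have incompatible types (linear vs. optimal tiling). A sketch of the page
test, assuming a power-of-two page size (illustrative rewrite):

\code
#include <cstdint>

bool BlocksOnSamePageSketch(uint64_t resourceAOffset, uint64_t resourceASize,
                            uint64_t resourceBOffset, uint64_t pageSize)
{
    uint64_t resourceAEnd       = resourceAOffset + resourceASize - 1;
    uint64_t resourceAEndPage   = resourceAEnd & ~(pageSize - 1);
    uint64_t resourceBStartPage = resourceBOffset & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
\endcode
*/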
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}

VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }

    RegisterFreeSuballocation(suballocItem);
    return suballocItem;
}
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }
}

void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item->size,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty())
    {
        return false;
    }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it)
    {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
        {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
            {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}
9106 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
9107 VmaBlockMetadata(hAllocator),
9109 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9110 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9111 m_1stVectorIndex(0),
9112 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
9113 m_1stNullItemsBeginCount(0),
9114 m_1stNullItemsMiddleCount(0),
9115 m_2ndNullItemsCount(0)
9119 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
9123 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
9125 VmaBlockMetadata::Init(size);
9126 m_SumFreeSize = size;
9129 bool VmaBlockMetadata_Linear::Validate()
const
9131 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9132 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9134 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
9135 VMA_VALIDATE(!suballocations1st.empty() ||
9136 suballocations2nd.empty() ||
9137 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
9139 if(!suballocations1st.empty())
9142 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
9144 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
9146 if(!suballocations2nd.empty())
9149 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
9152 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
9153 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
9155 VkDeviceSize sumUsedSize = 0;
9156 const size_t suballoc1stCount = suballocations1st.size();
9157 VkDeviceSize offset = VMA_DEBUG_MARGIN;
9159 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9161 const size_t suballoc2ndCount = suballocations2nd.size();
9162 size_t nullItem2ndCount = 0;
9163 for(
size_t i = 0; i < suballoc2ndCount; ++i)
9165 const VmaSuballocation& suballoc = suballocations2nd[i];
9166 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9168 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9169 VMA_VALIDATE(suballoc.offset >= offset);
9173 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9174 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9175 sumUsedSize += suballoc.size;
9182 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9185 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
9188 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
9190 const VmaSuballocation& suballoc = suballocations1st[i];
9191 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
9192 suballoc.hAllocation == VK_NULL_HANDLE);
9195 size_t nullItem1stCount = m_1stNullItemsBeginCount;
9197 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
9199 const VmaSuballocation& suballoc = suballocations1st[i];
9200 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9202 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9203 VMA_VALIDATE(suballoc.offset >= offset);
9204 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
9208 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9209 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9210 sumUsedSize += suballoc.size;
9217 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9219 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
9221 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9223 const size_t suballoc2ndCount = suballocations2nd.size();
9224 size_t nullItem2ndCount = 0;
9225 for(
size_t i = suballoc2ndCount; i--; )
9227 const VmaSuballocation& suballoc = suballocations2nd[i];
9228 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9230 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9231 VMA_VALIDATE(suballoc.offset >= offset);
9235 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9236 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9237 sumUsedSize += suballoc.size;
9244 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9247 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
9250 VMA_VALIDATE(offset <= GetSize());
9251 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
9256 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const
9258 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
9259 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
9262 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const
9264 const VkDeviceSize size = GetSize();
9276 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9278 switch(m_2ndVectorMode)
9280 case SECOND_VECTOR_EMPTY:
9286 const size_t suballocations1stCount = suballocations1st.size();
9287 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
9288 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9289 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
9291 firstSuballoc.offset,
9292 size - (lastSuballoc.offset + lastSuballoc.size));
9296 case SECOND_VECTOR_RING_BUFFER:
9301 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9302 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
9303 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
9304 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
9308 case SECOND_VECTOR_DOUBLE_STACK:
9313 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9314 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9315 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9316 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
9326 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
9328 const VkDeviceSize size = GetSize();
9329 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9330 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9331 const size_t suballoc1stCount = suballocations1st.size();
9332 const size_t suballoc2ndCount = suballocations2nd.size();
9343 VkDeviceSize lastOffset = 0;
9345 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9347 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9348 size_t nextAlloc2ndIndex = 0;
9349 while(lastOffset < freeSpace2ndTo1stEnd)
9352 while(nextAlloc2ndIndex < suballoc2ndCount &&
9353 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9355 ++nextAlloc2ndIndex;
9359 if(nextAlloc2ndIndex < suballoc2ndCount)
9361 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9364 if(lastOffset < suballoc.offset)
9367 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9381 lastOffset = suballoc.offset + suballoc.size;
9382 ++nextAlloc2ndIndex;
9388 if(lastOffset < freeSpace2ndTo1stEnd)
9390 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9398 lastOffset = freeSpace2ndTo1stEnd;
9403 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9404 const VkDeviceSize freeSpace1stTo2ndEnd =
9405 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9406 while(lastOffset < freeSpace1stTo2ndEnd)
9409 while(nextAlloc1stIndex < suballoc1stCount &&
9410 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9412 ++nextAlloc1stIndex;
9416 if(nextAlloc1stIndex < suballoc1stCount)
9418 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9421 if(lastOffset < suballoc.offset)
9424 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9438 lastOffset = suballoc.offset + suballoc.size;
9439 ++nextAlloc1stIndex;
9445 if(lastOffset < freeSpace1stTo2ndEnd)
9447 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9455 lastOffset = freeSpace1stTo2ndEnd;
9459 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9461 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9462 while(lastOffset < size)
9465 while(nextAlloc2ndIndex != SIZE_MAX &&
9466 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9468 --nextAlloc2ndIndex;
9472 if(nextAlloc2ndIndex != SIZE_MAX)
9474 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9477 if(lastOffset < suballoc.offset)
9480 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9494 lastOffset = suballoc.offset + suballoc.size;
9495 --nextAlloc2ndIndex;
9501 if(lastOffset < size)
9503 const VkDeviceSize unusedRangeSize = size - lastOffset;
9519 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const
9521 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9522 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9523 const VkDeviceSize size = GetSize();
9524 const size_t suballoc1stCount = suballocations1st.size();
9525 const size_t suballoc2ndCount = suballocations2nd.size();
9527 inoutStats.
size += size;
9529 VkDeviceSize lastOffset = 0;
9531 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9533 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9534 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9535 while(lastOffset < freeSpace2ndTo1stEnd)
9538 while(nextAlloc2ndIndex < suballoc2ndCount &&
9539 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9541 ++nextAlloc2ndIndex;
9545 if(nextAlloc2ndIndex < suballoc2ndCount)
9547 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9550 if(lastOffset < suballoc.offset)
9553 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9564 lastOffset = suballoc.offset + suballoc.size;
9565 ++nextAlloc2ndIndex;
9570 if(lastOffset < freeSpace2ndTo1stEnd)
9573 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9580 lastOffset = freeSpace2ndTo1stEnd;
9585 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9586 const VkDeviceSize freeSpace1stTo2ndEnd =
9587 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9588 while(lastOffset < freeSpace1stTo2ndEnd)
9591 while(nextAlloc1stIndex < suballoc1stCount &&
9592 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9594 ++nextAlloc1stIndex;
9598 if(nextAlloc1stIndex < suballoc1stCount)
9600 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9603 if(lastOffset < suballoc.offset)
9606 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9617 lastOffset = suballoc.offset + suballoc.size;
9618 ++nextAlloc1stIndex;
9623 if(lastOffset < freeSpace1stTo2ndEnd)
9626 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9633 lastOffset = freeSpace1stTo2ndEnd;
9637 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9639 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9640 while(lastOffset < size)
9643 while(nextAlloc2ndIndex != SIZE_MAX &&
9644 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9646 --nextAlloc2ndIndex;
9650 if(nextAlloc2ndIndex != SIZE_MAX)
9652 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9655 if(lastOffset < suballoc.offset)
9658 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9669 lastOffset = suballoc.offset + suballoc.size;
9670 --nextAlloc2ndIndex;
9675 if(lastOffset < size)
9678 const VkDeviceSize unusedRangeSize = size - lastOffset;
9691 #if VMA_STATS_STRING_ENABLED
9692 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const
9694 const VkDeviceSize size = GetSize();
9695 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9696 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9697 const size_t suballoc1stCount = suballocations1st.size();
9698 const size_t suballoc2ndCount = suballocations2nd.size();
9702 size_t unusedRangeCount = 0;
9703 VkDeviceSize usedBytes = 0;
9705 VkDeviceSize lastOffset = 0;
9707 size_t alloc2ndCount = 0;
9708 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9710 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9711 size_t nextAlloc2ndIndex = 0;
9712 while(lastOffset < freeSpace2ndTo1stEnd)
9715 while(nextAlloc2ndIndex < suballoc2ndCount &&
9716 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9718 ++nextAlloc2ndIndex;
9722 if(nextAlloc2ndIndex < suballoc2ndCount)
9724 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9727 if(lastOffset < suballoc.offset)
9736 usedBytes += suballoc.size;
9739 lastOffset = suballoc.offset + suballoc.size;
9740 ++nextAlloc2ndIndex;
9745 if(lastOffset < freeSpace2ndTo1stEnd)
9752 lastOffset = freeSpace2ndTo1stEnd;
9757 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9758 size_t alloc1stCount = 0;
9759 const VkDeviceSize freeSpace1stTo2ndEnd =
9760 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9761 while(lastOffset < freeSpace1stTo2ndEnd)
9764 while(nextAlloc1stIndex < suballoc1stCount &&
9765 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9767 ++nextAlloc1stIndex;
9771 if(nextAlloc1stIndex < suballoc1stCount)
9773 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9776 if(lastOffset < suballoc.offset)
9785 usedBytes += suballoc.size;
9788 lastOffset = suballoc.offset + suballoc.size;
9789 ++nextAlloc1stIndex;
9794 if(lastOffset < size)
9801 lastOffset = freeSpace1stTo2ndEnd;
9805 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9807 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9808 while(lastOffset < size)
9811 while(nextAlloc2ndIndex != SIZE_MAX &&
9812 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9814 --nextAlloc2ndIndex;
9818 if(nextAlloc2ndIndex != SIZE_MAX)
9820 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9823 if(lastOffset < suballoc.offset)
9832 usedBytes += suballoc.size;
9835 lastOffset = suballoc.offset + suballoc.size;
9836 --nextAlloc2ndIndex;
9841 if(lastOffset < size)
9853 const VkDeviceSize unusedBytes = size - usedBytes;
9854 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
9859 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9861 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9862 size_t nextAlloc2ndIndex = 0;
9863 while(lastOffset < freeSpace2ndTo1stEnd)
9866 while(nextAlloc2ndIndex < suballoc2ndCount &&
9867 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9869 ++nextAlloc2ndIndex;
9873 if(nextAlloc2ndIndex < suballoc2ndCount)
9875 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9878 if(lastOffset < suballoc.offset)
9881 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9882 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9887 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9890 lastOffset = suballoc.offset + suballoc.size;
9891 ++nextAlloc2ndIndex;
9896 if(lastOffset < freeSpace2ndTo1stEnd)
9899 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9900 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9904 lastOffset = freeSpace2ndTo1stEnd;
9909 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9910 while(lastOffset < freeSpace1stTo2ndEnd)
9913 while(nextAlloc1stIndex < suballoc1stCount &&
9914 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9916 ++nextAlloc1stIndex;
9920 if(nextAlloc1stIndex < suballoc1stCount)
9922 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9925 if(lastOffset < suballoc.offset)
9928 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9929 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9934 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9937 lastOffset = suballoc.offset + suballoc.size;
9938 ++nextAlloc1stIndex;
9943 if(lastOffset < freeSpace1stTo2ndEnd)
9946 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9947 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9951 lastOffset = freeSpace1stTo2ndEnd;
9955 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9957 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9958 while(lastOffset < size)
9961 while(nextAlloc2ndIndex != SIZE_MAX &&
9962 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9964 --nextAlloc2ndIndex;
9968 if(nextAlloc2ndIndex != SIZE_MAX)
9970 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9973 if(lastOffset < suballoc.offset)
9976 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9977 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9982 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9985 lastOffset = suballoc.offset + suballoc.size;
9986 --nextAlloc2ndIndex;
9991 if(lastOffset < size)
9994 const VkDeviceSize unusedRangeSize = size - lastOffset;
9995 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10004 PrintDetailedMap_End(json);
10006 #endif // #if VMA_STATS_STRING_ENABLED
10008 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
10009 uint32_t currentFrameIndex,
10010 uint32_t frameInUseCount,
10011 VkDeviceSize bufferImageGranularity,
10012 VkDeviceSize allocSize,
10013 VkDeviceSize allocAlignment,
10015 VmaSuballocationType allocType,
10016 bool canMakeOtherLost,
10018 VmaAllocationRequest* pAllocationRequest)
10020 VMA_ASSERT(allocSize > 0);
10021 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
10022 VMA_ASSERT(pAllocationRequest != VMA_NULL);
10023 VMA_HEAVY_ASSERT(Validate());
10024 return upperAddress ?
10025 CreateAllocationRequest_UpperAddress(
10026 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10027 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
10028 CreateAllocationRequest_LowerAddress(
10029 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10030 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
10033 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
10034 uint32_t currentFrameIndex,
10035 uint32_t frameInUseCount,
10036 VkDeviceSize bufferImageGranularity,
10037 VkDeviceSize allocSize,
10038 VkDeviceSize allocAlignment,
10039 VmaSuballocationType allocType,
10040 bool canMakeOtherLost,
10042 VmaAllocationRequest* pAllocationRequest)
10044 const VkDeviceSize size = GetSize();
10045 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10046 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10048 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10050 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
10055 if(allocSize > size)
10059 VkDeviceSize resultBaseOffset = size - allocSize;
10060 if(!suballocations2nd.empty())
10062 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10063 resultBaseOffset = lastSuballoc.offset - allocSize;
10064 if(allocSize > lastSuballoc.offset)
10071 VkDeviceSize resultOffset = resultBaseOffset;
10074 if(VMA_DEBUG_MARGIN > 0)
10076 if(resultOffset < VMA_DEBUG_MARGIN)
10080 resultOffset -= VMA_DEBUG_MARGIN;
10084 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
10088 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10090 bool bufferImageGranularityConflict =
false;
10091 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
10093 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
10094 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10096 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
10098 bufferImageGranularityConflict =
true;
10106 if(bufferImageGranularityConflict)
10108 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
10113 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
10114 suballocations1st.back().offset + suballocations1st.back().size :
10116 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
10120 if(bufferImageGranularity > 1)
10122 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
10124 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
10125 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10127 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
10141 pAllocationRequest->offset = resultOffset;
10142 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
10143 pAllocationRequest->sumItemSize = 0;
10145 pAllocationRequest->itemsToMakeLostCount = 0;
10146 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
10153 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
10154 uint32_t currentFrameIndex,
10155 uint32_t frameInUseCount,
10156 VkDeviceSize bufferImageGranularity,
10157 VkDeviceSize allocSize,
10158 VkDeviceSize allocAlignment,
10159 VmaSuballocationType allocType,
10160 bool canMakeOtherLost,
10162 VmaAllocationRequest* pAllocationRequest)
10164 const VkDeviceSize size = GetSize();
10165 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10166 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10168 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10172 VkDeviceSize resultBaseOffset = 0;
10173 if(!suballocations1st.empty())
10175 const VmaSuballocation& lastSuballoc = suballocations1st.back();
10176 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10180 VkDeviceSize resultOffset = resultBaseOffset;
10183 if(VMA_DEBUG_MARGIN > 0)
10185 resultOffset += VMA_DEBUG_MARGIN;
10189 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
10193 if(bufferImageGranularity > 1 && !suballocations1st.empty())
10195 bool bufferImageGranularityConflict =
false;
10196 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
10198 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
10199 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10201 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10203 bufferImageGranularityConflict =
true;
10211 if(bufferImageGranularityConflict)
10213 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10217 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
10218 suballocations2nd.back().offset : size;
10221 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
10225 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10227 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
10229 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
10230 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10232 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10246 pAllocationRequest->offset = resultOffset;
10247 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
10248 pAllocationRequest->sumItemSize = 0;
10250 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
10251 pAllocationRequest->itemsToMakeLostCount = 0;
10258 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10260 VMA_ASSERT(!suballocations1st.empty());
10262 VkDeviceSize resultBaseOffset = 0;
10263 if(!suballocations2nd.empty())
10265 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10266 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10270 VkDeviceSize resultOffset = resultBaseOffset;
10273 if(VMA_DEBUG_MARGIN > 0)
10275 resultOffset += VMA_DEBUG_MARGIN;
10279 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
10283 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10285 bool bufferImageGranularityConflict =
false;
10286 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
10288 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
10289 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10291 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10293 bufferImageGranularityConflict =
true;
10301 if(bufferImageGranularityConflict)
10303 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10307 pAllocationRequest->itemsToMakeLostCount = 0;
10308 pAllocationRequest->sumItemSize = 0;
10309 size_t index1st = m_1stNullItemsBeginCount;
10311 if(canMakeOtherLost)
10313 while(index1st < suballocations1st.size() &&
10314 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10317 const VmaSuballocation& suballoc = suballocations1st[index1st];
10318 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10324 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10325 if(suballoc.hAllocation->CanBecomeLost() &&
10326 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10328 ++pAllocationRequest->itemsToMakeLostCount;
10329 pAllocationRequest->sumItemSize += suballoc.size;
10341 if(bufferImageGranularity > 1)
10343 while(index1st < suballocations1st.size())
10345 const VmaSuballocation& suballoc = suballocations1st[index1st];
10346 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10348 if(suballoc.hAllocation != VK_NULL_HANDLE)
10351 if(suballoc.hAllocation->CanBecomeLost() &&
10352 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10354 ++pAllocationRequest->itemsToMakeLostCount;
10355 pAllocationRequest->sumItemSize += suballoc.size;
10373 if(index1st == suballocations1st.size() &&
10374 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10377 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
10382 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10383 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
10387 if(bufferImageGranularity > 1)
10389 for(
size_t nextSuballocIndex = index1st;
10390 nextSuballocIndex < suballocations1st.size();
10391 nextSuballocIndex++)
10393 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10394 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10396 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10410 pAllocationRequest->offset = resultOffset;
10411 pAllocationRequest->sumFreeSize =
10412 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10414 - pAllocationRequest->sumItemSize;
10415 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
10424 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10425 uint32_t currentFrameIndex,
10426 uint32_t frameInUseCount,
10427 VmaAllocationRequest* pAllocationRequest)
10429 if(pAllocationRequest->itemsToMakeLostCount == 0)
10434 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10437 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10438 size_t index = m_1stNullItemsBeginCount;
10439 size_t madeLostCount = 0;
10440 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
10442 if(index == suballocations->size())
10446 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10448 suballocations = &AccessSuballocations2nd();
10452 VMA_ASSERT(!suballocations->empty());
10454 VmaSuballocation& suballoc = (*suballocations)[index];
10455 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10457 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10458 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10459 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10461 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10462 suballoc.hAllocation = VK_NULL_HANDLE;
10463 m_SumFreeSize += suballoc.size;
10464 if(suballocations == &AccessSuballocations1st())
10466 ++m_1stNullItemsMiddleCount;
10470 ++m_2ndNullItemsCount;
10482 CleanupAfterFree();
10488 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10490 uint32_t lostAllocationCount = 0;
10492 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10493 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10495 VmaSuballocation& suballoc = suballocations1st[i];
10496 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10497 suballoc.hAllocation->CanBecomeLost() &&
10498 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10500 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10501 suballoc.hAllocation = VK_NULL_HANDLE;
10502 ++m_1stNullItemsMiddleCount;
10503 m_SumFreeSize += suballoc.size;
10504 ++lostAllocationCount;
10508 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10509 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10511 VmaSuballocation& suballoc = suballocations2nd[i];
10512 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10513 suballoc.hAllocation->CanBecomeLost() &&
10514 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10516 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10517 suballoc.hAllocation = VK_NULL_HANDLE;
10518 ++m_2ndNullItemsCount;
10519 m_SumFreeSize += suballoc.size;
10520 ++lostAllocationCount;
10524 if(lostAllocationCount)
10526 CleanupAfterFree();
10529 return lostAllocationCount;
10532 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10534 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10535 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10537 const VmaSuballocation& suballoc = suballocations1st[i];
10538 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10540 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10542 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10543 return VK_ERROR_VALIDATION_FAILED_EXT;
10545 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10547 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10548 return VK_ERROR_VALIDATION_FAILED_EXT;
10553 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10554 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10556 const VmaSuballocation& suballoc = suballocations2nd[i];
10557 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10559 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10561 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10562 return VK_ERROR_VALIDATION_FAILED_EXT;
10564 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10566 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10567 return VK_ERROR_VALIDATION_FAILED_EXT;
10575 void VmaBlockMetadata_Linear::Alloc(
10576 const VmaAllocationRequest& request,
10577 VmaSuballocationType type,
10578 VkDeviceSize allocSize,
10581 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10583 switch(request.type)
10585 case VmaAllocationRequestType::UpperAddress:
10587 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10588 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10589 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10590 suballocations2nd.push_back(newSuballoc);
10591 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
10594 case VmaAllocationRequestType::EndOf1st:
10596 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10598 VMA_ASSERT(suballocations1st.empty() ||
10599 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10601 VMA_ASSERT(request.offset + allocSize <= GetSize());
10603 suballocations1st.push_back(newSuballoc);
10606 case VmaAllocationRequestType::EndOf2nd:
10608 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10610 VMA_ASSERT(!suballocations1st.empty() &&
10611 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10612 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10614 switch(m_2ndVectorMode)
10616 case SECOND_VECTOR_EMPTY:
10618 VMA_ASSERT(suballocations2nd.empty());
10619 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10621 case SECOND_VECTOR_RING_BUFFER:
10623 VMA_ASSERT(!suballocations2nd.empty());
10625 case SECOND_VECTOR_DOUBLE_STACK:
10626 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10632 suballocations2nd.push_back(newSuballoc);
10636 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10639 m_SumFreeSize -= newSuballoc.size;
10642 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10644 FreeAtOffset(allocation->GetOffset());
10647 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10649 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10650 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10652 if(!suballocations1st.empty())
10655 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10656 if(firstSuballoc.offset == offset)
10658 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10659 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10660 m_SumFreeSize += firstSuballoc.size;
10661 ++m_1stNullItemsBeginCount;
10662 CleanupAfterFree();
10668 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10669 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10671 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10672 if(lastSuballoc.offset == offset)
10674 m_SumFreeSize += lastSuballoc.size;
10675 suballocations2nd.pop_back();
10676 CleanupAfterFree();
10681 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10683 VmaSuballocation& lastSuballoc = suballocations1st.back();
10684 if(lastSuballoc.offset == offset)
10686 m_SumFreeSize += lastSuballoc.size;
10687 suballocations1st.pop_back();
10688 CleanupAfterFree();
10695 VmaSuballocation refSuballoc;
10696 refSuballoc.offset = offset;
10698 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
10699 suballocations1st.begin() + m_1stNullItemsBeginCount,
10700 suballocations1st.end(),
10702 VmaSuballocationOffsetLess());
10703 if(it != suballocations1st.end())
10705 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10706 it->hAllocation = VK_NULL_HANDLE;
10707 ++m_1stNullItemsMiddleCount;
10708 m_SumFreeSize += it->size;
10709 CleanupAfterFree();
10714 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
10717 VmaSuballocation refSuballoc;
10718 refSuballoc.offset = offset;
10720 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10721 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
10722 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
10723 if(it != suballocations2nd.end())
10725 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10726 it->hAllocation = VK_NULL_HANDLE;
10727 ++m_2ndNullItemsCount;
10728 m_SumFreeSize += it->size;
10729 CleanupAfterFree();
10734 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
10737 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const
10739 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10740 const size_t suballocCount = AccessSuballocations1st().size();
10741 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
10744 void VmaBlockMetadata_Linear::CleanupAfterFree()
10746 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10747 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10751 suballocations1st.clear();
10752 suballocations2nd.clear();
10753 m_1stNullItemsBeginCount = 0;
10754 m_1stNullItemsMiddleCount = 0;
10755 m_2ndNullItemsCount = 0;
10756 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10760 const size_t suballoc1stCount = suballocations1st.size();
10761 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10762 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
10765 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10766 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10768 ++m_1stNullItemsBeginCount;
10769 --m_1stNullItemsMiddleCount;
10773 while(m_1stNullItemsMiddleCount > 0 &&
10774 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10776 --m_1stNullItemsMiddleCount;
10777 suballocations1st.pop_back();
10781 while(m_2ndNullItemsCount > 0 &&
10782 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10784 --m_2ndNullItemsCount;
10785 suballocations2nd.pop_back();
10789 while(m_2ndNullItemsCount > 0 &&
10790 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10792 --m_2ndNullItemsCount;
10793 VmaVectorRemove(suballocations2nd, 0);
10796 if(ShouldCompact1st())
10798 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10799 size_t srcIndex = m_1stNullItemsBeginCount;
10800 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10802 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10806 if(dstIndex != srcIndex)
10808 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10812 suballocations1st.resize(nonNullItemCount);
10813 m_1stNullItemsBeginCount = 0;
10814 m_1stNullItemsMiddleCount = 0;
10818 if(suballocations2nd.empty())
10820 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10824 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10826 suballocations1st.clear();
10827 m_1stNullItemsBeginCount = 0;
10829 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10832 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10833 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10834 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10835 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10837 ++m_1stNullItemsBeginCount;
10838 --m_1stNullItemsMiddleCount;
10840 m_2ndNullItemsCount = 0;
10841 m_1stVectorIndex ^= 1;
10846 VMA_HEAVY_ASSERT(Validate());
10853 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10854 VmaBlockMetadata(hAllocator),
10856 m_AllocationCount(0),
10860 memset(m_FreeList, 0,
sizeof(m_FreeList));
10863 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10865 DeleteNode(m_Root);
10868 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10870 VmaBlockMetadata::Init(size);
10872 m_UsableSize = VmaPrevPow2(size);
10873 m_SumFreeSize = m_UsableSize;
10877 while(m_LevelCount < MAX_LEVELS &&
10878 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
10883 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10884 rootNode->offset = 0;
10885 rootNode->type = Node::TYPE_FREE;
10886 rootNode->parent = VMA_NULL;
10887 rootNode->buddy = VMA_NULL;
10890 AddToFreeListFront(0, rootNode);
10893 bool VmaBlockMetadata_Buddy::Validate()
const
10896 ValidationContext ctx;
10897 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10899 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10901 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10902 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
10905 for(uint32_t level = 0; level < m_LevelCount; ++level)
10907 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10908 m_FreeList[level].front->free.prev == VMA_NULL);
10910 for(Node* node = m_FreeList[level].front;
10912 node = node->free.next)
10914 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10916 if(node->free.next == VMA_NULL)
10918 VMA_VALIDATE(m_FreeList[level].back == node);
10922 VMA_VALIDATE(node->free.next->free.prev == node);
10928 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10930 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
10936 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const
10938 for(uint32_t level = 0; level < m_LevelCount; ++level)
10940 if(m_FreeList[level].front != VMA_NULL)
10942 return LevelToNodeSize(level);
10948 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
10950 const VkDeviceSize unusableSize = GetUnusableSize();
10961 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10963 if(unusableSize > 0)
10972 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const
10974 const VkDeviceSize unusableSize = GetUnusableSize();
10976 inoutStats.
size += GetSize();
10977 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10982 if(unusableSize > 0)
10989 #if VMA_STATS_STRING_ENABLED
10991 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const
10995 CalcAllocationStatInfo(stat);
10997 PrintDetailedMap_Begin(
11003 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
11005 const VkDeviceSize unusableSize = GetUnusableSize();
11006 if(unusableSize > 0)
11008 PrintDetailedMap_UnusedRange(json,
11013 PrintDetailedMap_End(json);
11016 #endif // #if VMA_STATS_STRING_ENABLED
11018 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
11019 uint32_t currentFrameIndex,
11020 uint32_t frameInUseCount,
11021 VkDeviceSize bufferImageGranularity,
11022 VkDeviceSize allocSize,
11023 VkDeviceSize allocAlignment,
11025 VmaSuballocationType allocType,
11026 bool canMakeOtherLost,
11028 VmaAllocationRequest* pAllocationRequest)
11030 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
11034 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
11035 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
11036 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
11038 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
11039 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
11042 if(allocSize > m_UsableSize)
11047 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
11048 for(uint32_t level = targetLevel + 1; level--; )
11050 for(Node* freeNode = m_FreeList[level].front;
11051 freeNode != VMA_NULL;
11052 freeNode = freeNode->free.next)
11054 if(freeNode->offset % allocAlignment == 0)
11056 pAllocationRequest->type = VmaAllocationRequestType::Normal;
11057 pAllocationRequest->offset = freeNode->offset;
11058 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
11059 pAllocationRequest->sumItemSize = 0;
11060 pAllocationRequest->itemsToMakeLostCount = 0;
11061 pAllocationRequest->customData = (
void*)(uintptr_t)level;
11070 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
11071 uint32_t currentFrameIndex,
11072 uint32_t frameInUseCount,
11073 VmaAllocationRequest* pAllocationRequest)
11079 return pAllocationRequest->itemsToMakeLostCount == 0;
11082 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
11091 void VmaBlockMetadata_Buddy::Alloc(
11092 const VmaAllocationRequest& request,
11093 VmaSuballocationType type,
11094 VkDeviceSize allocSize,
11097 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
11099 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
11100 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
11102 Node* currNode = m_FreeList[currLevel].front;
11103 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
11104 while(currNode->offset != request.offset)
11106 currNode = currNode->free.next;
11107 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
11111 while(currLevel < targetLevel)
11115 RemoveFromFreeList(currLevel, currNode);
11117 const uint32_t childrenLevel = currLevel + 1;
11120 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
11121 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
11123 leftChild->offset = currNode->offset;
11124 leftChild->type = Node::TYPE_FREE;
11125 leftChild->parent = currNode;
11126 leftChild->buddy = rightChild;
11128 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
11129 rightChild->type = Node::TYPE_FREE;
11130 rightChild->parent = currNode;
11131 rightChild->buddy = leftChild;
11134 currNode->type = Node::TYPE_SPLIT;
11135 currNode->split.leftChild = leftChild;
11138 AddToFreeListFront(childrenLevel, rightChild);
11139 AddToFreeListFront(childrenLevel, leftChild);
11144 currNode = m_FreeList[currLevel].front;
11153 VMA_ASSERT(currLevel == targetLevel &&
11154 currNode != VMA_NULL &&
11155 currNode->type == Node::TYPE_FREE);
11156 RemoveFromFreeList(currLevel, currNode);
11159 currNode->type = Node::TYPE_ALLOCATION;
11160 currNode->allocation.alloc = hAllocation;
11162 ++m_AllocationCount;
11164 m_SumFreeSize -= allocSize;
11167 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
11169 if(node->type == Node::TYPE_SPLIT)
11171 DeleteNode(node->split.leftChild->buddy);
11172 DeleteNode(node->split.leftChild);
11175 vma_delete(GetAllocationCallbacks(), node);
11178 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const
11180 VMA_VALIDATE(level < m_LevelCount);
11181 VMA_VALIDATE(curr->parent == parent);
11182 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
11183 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
11186 case Node::TYPE_FREE:
11188 ctx.calculatedSumFreeSize += levelNodeSize;
11189 ++ctx.calculatedFreeCount;
11191 case Node::TYPE_ALLOCATION:
11192 ++ctx.calculatedAllocationCount;
11193 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
11194 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
11196 case Node::TYPE_SPLIT:
11198 const uint32_t childrenLevel = level + 1;
11199 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
11200 const Node*
const leftChild = curr->split.leftChild;
11201 VMA_VALIDATE(leftChild != VMA_NULL);
11202 VMA_VALIDATE(leftChild->offset == curr->offset);
11203 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
11205 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
11207 const Node*
const rightChild = leftChild->buddy;
11208 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
11209 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
11211 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
11222 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const
11225 uint32_t level = 0;
11226 VkDeviceSize currLevelNodeSize = m_UsableSize;
11227 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
11228 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
11231 currLevelNodeSize = nextLevelNodeSize;
11232 nextLevelNodeSize = currLevelNodeSize >> 1;
11237 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
11240 Node* node = m_Root;
11241 VkDeviceSize nodeOffset = 0;
11242 uint32_t level = 0;
11243 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
11244 while(node->type == Node::TYPE_SPLIT)
11246 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
11247 if(offset < nodeOffset + nextLevelSize)
11249 node = node->split.leftChild;
11253 node = node->split.leftChild->buddy;
11254 nodeOffset += nextLevelSize;
11257 levelNodeSize = nextLevelSize;
11260 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
11261 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
11264 --m_AllocationCount;
11265 m_SumFreeSize += alloc->GetSize();
11267 node->type = Node::TYPE_FREE;
11270 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
11272 RemoveFromFreeList(level, node->buddy);
11273 Node*
const parent = node->parent;
11275 vma_delete(GetAllocationCallbacks(), node->buddy);
11276 vma_delete(GetAllocationCallbacks(), node);
11277 parent->type = Node::TYPE_FREE;
11285 AddToFreeListFront(level, node);
11288 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const
11292 case Node::TYPE_FREE:
11298 case Node::TYPE_ALLOCATION:
11300 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11306 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11307 if(unusedRangeSize > 0)
11316 case Node::TYPE_SPLIT:
11318 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11319 const Node*
const leftChild = node->split.leftChild;
11320 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11321 const Node*
const rightChild = leftChild->buddy;
11322 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
11330 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11332 VMA_ASSERT(node->type == Node::TYPE_FREE);
11335 Node*
const frontNode = m_FreeList[level].front;
11336 if(frontNode == VMA_NULL)
11338 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11339 node->free.prev = node->free.next = VMA_NULL;
11340 m_FreeList[level].front = m_FreeList[level].back = node;
11344 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11345 node->free.prev = VMA_NULL;
11346 node->free.next = frontNode;
11347 frontNode->free.prev = node;
11348 m_FreeList[level].front = node;
11352 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11354 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
11357 if(node->free.prev == VMA_NULL)
11359 VMA_ASSERT(m_FreeList[level].front == node);
11360 m_FreeList[level].front = node->free.next;
11364 Node*
const prevFreeNode = node->free.prev;
11365 VMA_ASSERT(prevFreeNode->free.next == node);
11366 prevFreeNode->free.next = node->free.next;
11370 if(node->free.next == VMA_NULL)
11372 VMA_ASSERT(m_FreeList[level].back == node);
11373 m_FreeList[level].back = node->free.prev;
11377 Node*
const nextFreeNode = node->free.next;
11378 VMA_ASSERT(nextFreeNode->free.prev == node);
11379 nextFreeNode->free.prev = node->free.prev;
11383 #if VMA_STATS_STRING_ENABLED
11384 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const
11388 case Node::TYPE_FREE:
11389 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11391 case Node::TYPE_ALLOCATION:
11393 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11394 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11395 if(allocSize < levelNodeSize)
11397 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11401 case Node::TYPE_SPLIT:
11403 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11404 const Node*
const leftChild = node->split.leftChild;
11405 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11406 const Node*
const rightChild = leftChild->buddy;
11407 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
11414 #endif // #if VMA_STATS_STRING_ENABLED
11420 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
11421 m_pMetadata(VMA_NULL),
11422 m_MemoryTypeIndex(UINT32_MAX),
11424 m_hMemory(VK_NULL_HANDLE),
11426 m_pMappedData(VMA_NULL)
11430 void VmaDeviceMemoryBlock::Init(
11433 uint32_t newMemoryTypeIndex,
11434 VkDeviceMemory newMemory,
11435 VkDeviceSize newSize,
11437 uint32_t algorithm)
11439 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11441 m_hParentPool = hParentPool;
11442 m_MemoryTypeIndex = newMemoryTypeIndex;
11444 m_hMemory = newMemory;
11449 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11452 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11458 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11460 m_pMetadata->Init(newSize);
11463 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11467 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11469 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11470 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11471 m_hMemory = VK_NULL_HANDLE;
11473 vma_delete(allocator, m_pMetadata);
11474 m_pMetadata = VMA_NULL;
11477 bool VmaDeviceMemoryBlock::Validate()
const
11479 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11480 (m_pMetadata->GetSize() != 0));
11482 return m_pMetadata->Validate();
11485 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11487 void* pData =
nullptr;
11488 VkResult res = Map(hAllocator, 1, &pData);
11489 if(res != VK_SUCCESS)
11494 res = m_pMetadata->CheckCorruption(pData);
11496 Unmap(hAllocator, 1);
11501 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
11508 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11509 if(m_MapCount != 0)
11511 m_MapCount += count;
11512 VMA_ASSERT(m_pMappedData != VMA_NULL);
11513 if(ppData != VMA_NULL)
11515 *ppData = m_pMappedData;
11521 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11522 hAllocator->m_hDevice,
11528 if(result == VK_SUCCESS)
11530 if(ppData != VMA_NULL)
11532 *ppData = m_pMappedData;
11534 m_MapCount = count;
11540 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11547 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11548 if(m_MapCount >= count)
11550 m_MapCount -= count;
11551 if(m_MapCount == 0)
11553 m_pMappedData = VMA_NULL;
11554 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11559 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}

VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}

VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}

VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
static void VmaInitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
{
    inoutInfo.blockCount += srcInfo.blockCount;
    inoutInfo.allocationCount += srcInfo.allocationCount;
    inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
    inoutInfo.usedBytes += srcInfo.usedBytes;
    inoutInfo.unusedBytes += srcInfo.unusedBytes;
    inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
    inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
    inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}

VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}

VmaPool_T::~VmaPool_T()
{
}

void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}
#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}

VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}

void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}

bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_Algorithm == 0 || m_Algorithm == VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
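// AllocatePage() performs the placement search for a single allocation:
// 1. Try existing blocks - only the last block for the linear algorithm, otherwise all
//    blocks in forward order for best-fit, or in backward order for worst-fit and
//    first-fit strategies.
// 2. If allowed, create a new block. When the block size is not explicit, the
//    preferred size is halved up to NEW_BLOCK_SIZE_SHIFT_MAX times while the request
//    still fits, so small pools start at 1/8, 1/4, 1/2 of the preferred block size.
// 3. As a last resort, when VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT was
//    specified, repeatedly try to free space by making other allocations lost, up to
//    VMA_ALLOCATION_TRY_COUNT attempts.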
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // If linearAlgorithm is used, canMakeOtherLost is available only when used as ring buffer,
    // which in turn is available only when maxBlockCount = 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
        }

        // 2. Try to create new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }

    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // Allocate from this block.
                    if(pBestRequestBlock->m_pMetadata->IsEmpty())
                    {
                        m_HasEmptyBlock = false;
                    }
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
                    (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment,
                        size,
                        m_MemoryTypeIndex,
                        suballocType,
                        mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations must have been touched while we were here. Next try.
            }
            else
            {
                // Could not find place in any of the blocks - break outer loop.
                break;
            }
        }
        /* Maximum number of tries exceeded - a very unlikely situation. It might be caused by
        two threads frantically fighting over the same place in a memory block. */
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
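// Free() applies the empty-block retention heuristic: at most one empty block is kept
// alive for quick reuse. A block that becomes empty is destroyed immediately if there
// already is an empty block, or if the heap budget is currently exceeded - so pools
// shrink under memory pressure - but the count never drops below m_MinBlockCount.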
void VmaBlockVector::Free(
    VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // There is already an empty block, or the budget is exceeded - delete this one.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // else: keep this one empty block around for reuse.
            else
            {
                m_HasEmptyBlock = true;
            }
        }
        // pBlock didn't become empty, but there is another empty block - find and free that one.
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
                m_HasEmptyBlock = false;
            }
        }

        IncrementallySortBlocks();
    }

    // Destruction of the free block is deferred until this point, outside of the
    // mutex lock, for performance reasons.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}

VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}

void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}
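// IncrementallySortBlocks() performs a single bubble-sort pass: at most one pair of
// neighboring blocks is swapped per call, keeping m_Blocks approximately sorted by
// ascending sum of free space. Amortized over many Allocate()/Free() calls this keeps
// best-fit searches effective without ever paying for a full sort.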
void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}

VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex,
        m_FrameInUseCount,
        m_BufferImageGranularity,
        size,
        alignment,
        isUpperAddress,
        suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // Allocate from pBlock. Since canMakeOtherLost is false, this cannot make other allocations lost.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        // We no longer have an empty block.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = false;
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
        (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment,
            size,
            m_MemoryTypeIndex,
            suballocType,
            mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}

VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;
    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created. Create new block for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
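// The CPU defragmentation path below maps every block that participates in a move
// (unless it is already persistently mapped) and copies allocation contents with
// memmove. For non-HOST_COHERENT memory types, the source range is invalidated before
// the copy and the destination range flushed afterwards, with both ranges aligned to
// nonCoherentAtomSize as the Vulkan specification requires.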
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
            memmove(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation,
    // regardless of pDefragCtx->res.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
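// The GPU defragmentation path cannot rely on host-visible pointers. Instead, a
// temporary VkBuffer spanning the whole block is created and bound to each block that
// takes part in a move, and every move becomes a vkCmdCopyBuffer region recorded into
// the user-provided command buffer. The context result is downgraded to VK_NOT_READY
// until the user submits that command buffer and ends defragmentation, which destroys
// the temporary buffers in DefragmentationEnd().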
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind buffer for whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Save buffers to defrag context for later destruction.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}

void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    m_HasEmptyBlock = false;
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                m_HasEmptyBlock = true;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
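// Defragment() picks the execution path per block vector: the CPU path requires the
// memory type to be HOST_VISIBLE; the GPU path requires corruption detection to be
// disabled and the memory type to be enabled in GetGpuDefragmentationMemoryTypeBits().
// When both paths are available, DEVICE_LOCAL memory - or an integrated GPU, where all
// memory is effectively device-local - tips the choice toward the GPU path.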
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            m_Mutex.LockWrite();
            pCtx->mutexLocked = true;
        }

        pCtx->Begin(overlappingMoveSupported);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
            VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
        pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, moves);
            }
        }
    }
}

void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats)
{
    // Destroy buffers.
    for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
    {
        VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
        if(blockCtx.hBuffer)
        {
            (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
                m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
        }
    }

    if(pCtx->res >= VK_SUCCESS)
    {
        FreeEmptyBlocks(pStats);
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}

bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}

void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}

VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}

void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}

VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now as we are inside VmaBlockVector::m_Mutex, we can make a final check whether this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    // This is a choice based on research.
    uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;

    size_t srcBlockMinIndex = 0;

    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find a new place for this allocation in a preceding or the current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                strategy,
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move;
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);
                pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);

                allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If the allocation was not moved, it stays in pSrcBlockInfo->m_Allocations for the next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}

size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        // This is a choice based on research.
        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks this time by the main criterium, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // This is a choice based on research.
    const uint32_t roundCount = 2;

    // Execute defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
    }

    return result;
}

bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
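// VmaDefragmentationAlgorithm_Fast compacts all allocations toward the front of the
// block vector in a single linear pass, rewriting each block's suballocation list
// directly instead of issuing individual alloc/free calls. It is applicable only when
// every allocation in the vector participates, VMA_DEBUG_MARGIN is 0, and no
// buffer/image granularity conflict is possible, because the pass does not re-validate
// alignment interactions between neighboring resources of different kinds.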
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}

VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most destination.

    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
    });

    // THE MAIN ALGORITHM

    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;

    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            // Try to place it in one of the free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to the next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register the remaining free space at the end of dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If destination and source places overlap, skip if the move would shift by less than 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;
                        VmaDefragmentationMove move = {
                            srcOrigBlockIndex, dstOrigBlockIndex,
                            srcAllocOffset, dstAllocOffset,
                            srcAllocSize };
                        moves.push_back(move);
                    }
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, dstOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
            }
        }
    }

    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
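// PreprocessMetadata() strips all FREE suballocations from every block, leaving only
// real allocations for the compaction pass to slide around. PostprocessMetadata() then
// reinserts free ranges between the final allocation positions and rebuilds
// m_FreeCount, m_SumFreeSize, and the size-sorted m_FreeSuballocationsBySize vector.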
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}

void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - the entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}

void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end())
    {
        if(it->offset < suballoc.offset)
        {
            ++it;
        }
        else
        {
            break;
        }
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}

VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}
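// Begin() is where the defragmentation algorithm is chosen: the fast single-pass
// algorithm when VMA_DEBUG_MARGIN is 0, all allocations in the block vector take part,
// and no buffer/image granularity conflict is possible; otherwise the generic
// algorithm, which moves individual allocations between blocks over multiple rounds.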
void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible())
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}

VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}

void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with an algorithm other than the default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}

void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentation contexts. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // Dedicated allocations cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // Lost allocations cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with an algorithm other than the default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to a default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}
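// VmaRecorder, compiled in with VMA_RECORDING_ENABLED, appends one CSV line per
// intercepted VMA call - thread id, timestamp, frame index, function name, and
// parameters - so a captured session can later be replayed for profiling. A minimal
// sketch of enabling it at allocator creation (assuming recording support was
// compiled in, with a hypothetical file path):
//
//     VmaRecordSettings recordSettings = {};
//     recordSettings.pFilePath = "vma_capture.csv";
//     VmaAllocatorCreateInfo allocatorInfo = {};
//     // ...fill physicalDevice, device, instance...
//     allocatorInfo.pRecordSettings = &recordSettings;
//     vmaCreateAllocator(&allocatorInfo, &hAllocator);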
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_Freq(INT64_MAX),
    m_StartCounter(INT64_MAX)
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,7");

    return VK_SUCCESS;
}

VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}

void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}

void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}

void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
14172 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
14173 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
14175 CallParams callParams;
14176 GetBasicParams(callParams);
14178 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14179 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
14186 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
14187 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
14189 CallParams callParams;
14190 GetBasicParams(callParams);
14192 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14193 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
14200 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
14201 const VkBufferCreateInfo& bufCreateInfo,
14205 CallParams callParams;
14206 GetBasicParams(callParams);
14208 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14209 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
14210 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14211 bufCreateInfo.flags,
14212 bufCreateInfo.size,
14213 bufCreateInfo.usage,
14214 bufCreateInfo.sharingMode,
14215 allocCreateInfo.
flags,
14216 allocCreateInfo.
usage,
14220 allocCreateInfo.
pool,
14222 userDataStr.GetString());
14226 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
14227 const VkImageCreateInfo& imageCreateInfo,
14231 CallParams callParams;
14232 GetBasicParams(callParams);
14234 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14235 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
14236 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14237 imageCreateInfo.flags,
14238 imageCreateInfo.imageType,
14239 imageCreateInfo.format,
14240 imageCreateInfo.extent.width,
14241 imageCreateInfo.extent.height,
14242 imageCreateInfo.extent.depth,
14243 imageCreateInfo.mipLevels,
14244 imageCreateInfo.arrayLayers,
14245 imageCreateInfo.samples,
14246 imageCreateInfo.tiling,
14247 imageCreateInfo.usage,
14248 imageCreateInfo.sharingMode,
14249 imageCreateInfo.initialLayout,
14250 allocCreateInfo.
flags,
14251 allocCreateInfo.
usage,
14255 allocCreateInfo.
pool,
14257 userDataStr.GetString());
14261 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
14264 CallParams callParams;
14265 GetBasicParams(callParams);
14267 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14268 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
14273 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
14276 CallParams callParams;
14277 GetBasicParams(callParams);
14279 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14280 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
14285 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
14288 CallParams callParams;
14289 GetBasicParams(callParams);
14291 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14292 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
14297 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
14300 CallParams callParams;
14301 GetBasicParams(callParams);
14303 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14304 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
14309 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
14312 CallParams callParams;
14313 GetBasicParams(callParams);
14315 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14316 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
14321 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
14325 CallParams callParams;
14326 GetBasicParams(callParams);
14328 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14329 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
14332 fprintf(m_File,
",");
14334 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
14344 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
14347 CallParams callParams;
14348 GetBasicParams(callParams);
14350 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14351 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
14356 void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
14360 CallParams callParams;
14361 GetBasicParams(callParams);
14363 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14364 fprintf(m_File,
"%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14365 pool, name != VMA_NULL ? name :
"");
14371 if(pUserData != VMA_NULL)
14375 m_Str = (
const char*)pUserData;
14379 sprintf_s(m_PtrStr,
"%p", pUserData);
14389 void VmaRecorder::WriteConfiguration(
14390 const VkPhysicalDeviceProperties& devProps,
14391 const VkPhysicalDeviceMemoryProperties& memProps,
14392 bool dedicatedAllocationExtensionEnabled,
14393 bool bindMemory2ExtensionEnabled,
14394 bool memoryBudgetExtensionEnabled)
14396 fprintf(m_File,
"Config,Begin\n");
14398 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
14399 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
14400 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
14401 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
14402 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
14403 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
14405 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
14406 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
14407 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
14409 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
14410 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
14412 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
14413 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
14415 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
14416 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
14418 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
14419 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
14422 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
14423 fprintf(m_File,
"Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
14424 fprintf(m_File,
"Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
14426 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
14427 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
14428 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
14429 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
14430 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
14431 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
14432 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
14433 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
14434 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
14436 fprintf(m_File,
"Config,End\n");
14439 void VmaRecorder::GetBasicParams(CallParams& outParams)
14441 outParams.threadId = GetCurrentThreadId();
14443 LARGE_INTEGER counter;
14444 QueryPerformanceCounter(&counter);
14445 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
14448 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
14452 fprintf(m_File,
"%p", pItems[0]);
14453 for(uint64_t i = 1; i < count; ++i)
14455 fprintf(m_File,
" %p", pItems[i]);
14460 void VmaRecorder::Flush()
14468 #endif // #if VMA_RECORDING_ENABLED
14473 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
14474 m_Allocator(pAllocationCallbacks, 1024)
14480 VmaMutexLock mutexLock(m_Mutex);
14481 return m_Allocator.Alloc();
14484 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
14486 VmaMutexLock mutexLock(m_Mutex);
14487 m_Allocator.Free(hAlloc);
14498 m_hDevice(pCreateInfo->device),
14499 m_hInstance(pCreateInfo->instance),
14500 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
14501 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14502 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14503 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14504 m_HeapSizeLimitMask(0),
14505 m_PreferredLargeHeapBlockSize(0),
14506 m_PhysicalDevice(pCreateInfo->physicalDevice),
14507 m_CurrentFrameIndex(0),
14508 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
14509 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
14512 ,m_pRecorder(VMA_NULL)
14515 if(VMA_DEBUG_DETECT_CORRUPTION)
14518 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
14523 #if !(VMA_DEDICATED_ALLOCATION)
14526 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14529 #if !(VMA_BIND_MEMORY2)
14532 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
14535 #if !(VMA_MEMORY_BUDGET)
14538 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
14542 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14543 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14544 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14546 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14547 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
14548 memset(&m_VulkanFunctions, 0,
sizeof(m_VulkanFunctions));
14558 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14559 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14561 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14562 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14563 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14564 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
14571 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14573 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14574 if(limit != VK_WHOLE_SIZE)
14576 m_HeapSizeLimitMask |= 1u << heapIndex;
14577 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14579 m_MemProps.memoryHeaps[heapIndex].size = limit;
14585 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14587 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14589 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14593 preferredBlockSize,
14596 GetBufferImageGranularity(),
14602 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14609 VkResult res = VK_SUCCESS;
14614 #if VMA_RECORDING_ENABLED
14615 m_pRecorder = vma_new(
this, VmaRecorder)();
14617 if(res != VK_SUCCESS)
14621 m_pRecorder->WriteConfiguration(
14622 m_PhysicalDeviceProperties,
14624 m_UseKhrDedicatedAllocation,
14625 m_UseKhrBindMemory2,
14626 m_UseExtMemoryBudget);
14627 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14629 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14630 return VK_ERROR_FEATURE_NOT_PRESENT;
14634 #if VMA_MEMORY_BUDGET
14635 if(m_UseExtMemoryBudget)
14637 UpdateVulkanBudget();
14639 #endif // #if VMA_MEMORY_BUDGET
14644 VmaAllocator_T::~VmaAllocator_T()
14646 #if VMA_RECORDING_ENABLED
14647 if(m_pRecorder != VMA_NULL)
14649 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14650 vma_delete(
this, m_pRecorder);
14654 VMA_ASSERT(m_Pools.empty());
14656 for(
size_t i = GetMemoryTypeCount(); i--; )
14658 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
14660 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
14663 vma_delete(
this, m_pDedicatedAllocations[i]);
14664 vma_delete(
this, m_pBlockVectors[i]);
14668 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14670 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
14671 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14672 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14673 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14674 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14675 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14676 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14677 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14678 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14679 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14680 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14681 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14682 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14683 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14684 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14685 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14686 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14687 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
14688 #if VMA_DEDICATED_ALLOCATION
14689 if(m_UseKhrDedicatedAllocation)
14691 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14692 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14693 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14694 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
14696 #endif // #if VMA_DEDICATED_ALLOCATION
14697 #if VMA_BIND_MEMORY2
14698 if(m_UseKhrBindMemory2)
14700 m_VulkanFunctions.vkBindBufferMemory2KHR =
14701 (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkBindBufferMemory2KHR");
14702 m_VulkanFunctions.vkBindImageMemory2KHR =
14703 (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkBindImageMemory2KHR");
14705 #endif // #if VMA_BIND_MEMORY2
14706 #if VMA_MEMORY_BUDGET
14707 if(m_UseExtMemoryBudget)
14709 VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
14710 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
14711 (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance,
"vkGetPhysicalDeviceMemoryProperties2KHR");
14713 #endif // #if VMA_MEMORY_BUDGET
14714 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
14716 #define VMA_COPY_IF_NOT_NULL(funcName) \
14717 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
14719 if(pVulkanFunctions != VMA_NULL)
14721 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14722 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14723 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14724 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14725 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14726 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14727 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14728 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14729 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14730 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14731 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14732 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14733 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14734 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14735 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14736 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14737 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14738 #if VMA_DEDICATED_ALLOCATION
14739 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14740 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
14742 #if VMA_BIND_MEMORY2
14743 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
14744 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
14746 #if VMA_MEMORY_BUDGET
14747 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
14751 #undef VMA_COPY_IF_NOT_NULL
14755 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14756 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14757 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14758 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14759 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14760 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14761 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14762 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14763 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14764 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14765 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14766 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14767 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14768 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14769 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14770 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14771 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14772 #if VMA_DEDICATED_ALLOCATION
14773 if(m_UseKhrDedicatedAllocation)
14775 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14776 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14779 #if VMA_BIND_MEMORY2
14780 if(m_UseKhrBindMemory2)
14782 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
14783 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
14786 #if VMA_MEMORY_BUDGET
14787 if(m_UseExtMemoryBudget)
14789 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
14794 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14796 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14797 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14798 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14799 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
14802 VkResult VmaAllocator_T::AllocateMemoryOfType(
14804 VkDeviceSize alignment,
14805 bool dedicatedAllocation,
14806 VkBuffer dedicatedBuffer,
14807 VkImage dedicatedImage,
14809 uint32_t memTypeIndex,
14810 VmaSuballocationType suballocType,
14811 size_t allocationCount,
14814 VMA_ASSERT(pAllocations != VMA_NULL);
14815 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
14821 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14831 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14832 VMA_ASSERT(blockVector);
14834 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
14835 bool preferDedicatedMemory =
14836 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14837 dedicatedAllocation ||
14839 size > preferredBlockSize / 2;
14841 if(preferDedicatedMemory &&
14843 finalCreateInfo.
pool == VK_NULL_HANDLE)
14852 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14856 return AllocateDedicatedMemory(
14872 VkResult res = blockVector->Allocate(
14873 m_CurrentFrameIndex.load(),
14880 if(res == VK_SUCCESS)
14888 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14892 res = AllocateDedicatedMemory(
14899 finalCreateInfo.pUserData,
14904 if(res == VK_SUCCESS)
14907 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14913 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
14920 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14922 VmaSuballocationType suballocType,
14923 uint32_t memTypeIndex,
14926 bool isUserDataString,
14928 VkBuffer dedicatedBuffer,
14929 VkImage dedicatedImage,
14930 size_t allocationCount,
14933 VMA_ASSERT(allocationCount > 0 && pAllocations);
14937 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14939 GetBudget(&heapBudget, heapIndex, 1);
14940 if(heapBudget.
usage + size * allocationCount > heapBudget.
budget)
14942 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14946 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14947 allocInfo.memoryTypeIndex = memTypeIndex;
14948 allocInfo.allocationSize = size;
14950 #if VMA_DEDICATED_ALLOCATION
14951 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14952 if(m_UseKhrDedicatedAllocation)
14954 if(dedicatedBuffer != VK_NULL_HANDLE)
14956 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14957 dedicatedAllocInfo.buffer = dedicatedBuffer;
14958 allocInfo.pNext = &dedicatedAllocInfo;
14960 else if(dedicatedImage != VK_NULL_HANDLE)
14962 dedicatedAllocInfo.image = dedicatedImage;
14963 allocInfo.pNext = &dedicatedAllocInfo;
14966 #endif // #if VMA_DEDICATED_ALLOCATION
14969 VkResult res = VK_SUCCESS;
14970 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14972 res = AllocateDedicatedMemoryPage(
14980 pAllocations + allocIndex);
14981 if(res != VK_SUCCESS)
14987 if(res == VK_SUCCESS)
14991 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14992 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14993 VMA_ASSERT(pDedicatedAllocations);
14994 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14996 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
15000 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
15005 while(allocIndex--)
15008 VkDeviceMemory hMemory = currAlloc->GetMemory();
15020 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
15021 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
15022 currAlloc->SetUserData(
this, VMA_NULL);
15024 m_AllocationObjectAllocator.Free(currAlloc);
15027 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
15033 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
15035 VmaSuballocationType suballocType,
15036 uint32_t memTypeIndex,
15037 const VkMemoryAllocateInfo& allocInfo,
15039 bool isUserDataString,
15043 VkDeviceMemory hMemory = VK_NULL_HANDLE;
15044 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
15047 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
15051 void* pMappedData = VMA_NULL;
15054 res = (*m_VulkanFunctions.vkMapMemory)(
15063 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
15064 FreeVulkanMemory(memTypeIndex, size, hMemory);
15069 *pAllocation = m_AllocationObjectAllocator.Allocate();
15070 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
15071 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
15072 (*pAllocation)->SetUserData(
this, pUserData);
15073 m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
15074 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
15076 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
15082 void VmaAllocator_T::GetBufferMemoryRequirements(
15084 VkMemoryRequirements& memReq,
15085 bool& requiresDedicatedAllocation,
15086 bool& prefersDedicatedAllocation)
const
15088 #if VMA_DEDICATED_ALLOCATION
15089 if(m_UseKhrDedicatedAllocation)
15091 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
15092 memReqInfo.buffer = hBuffer;
15094 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
15096 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
15097 memReq2.pNext = &memDedicatedReq;
15099 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
15101 memReq = memReq2.memoryRequirements;
15102 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
15103 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
15106 #endif // #if VMA_DEDICATED_ALLOCATION
15108 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
15109 requiresDedicatedAllocation =
false;
15110 prefersDedicatedAllocation =
false;
15114 void VmaAllocator_T::GetImageMemoryRequirements(
15116 VkMemoryRequirements& memReq,
15117 bool& requiresDedicatedAllocation,
15118 bool& prefersDedicatedAllocation)
const
15120 #if VMA_DEDICATED_ALLOCATION
15121 if(m_UseKhrDedicatedAllocation)
15123 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
15124 memReqInfo.image = hImage;
15126 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
15128 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
15129 memReq2.pNext = &memDedicatedReq;
15131 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
15133 memReq = memReq2.memoryRequirements;
15134 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
15135 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
15138 #endif // #if VMA_DEDICATED_ALLOCATION
15140 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
15141 requiresDedicatedAllocation =
false;
15142 prefersDedicatedAllocation =
false;
15146 VkResult VmaAllocator_T::AllocateMemory(
15147 const VkMemoryRequirements& vkMemReq,
15148 bool requiresDedicatedAllocation,
15149 bool prefersDedicatedAllocation,
15150 VkBuffer dedicatedBuffer,
15151 VkImage dedicatedImage,
15153 VmaSuballocationType suballocType,
15154 size_t allocationCount,
15157 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
15159 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
15161 if(vkMemReq.size == 0)
15163 return VK_ERROR_VALIDATION_FAILED_EXT;
15168 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
15169 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15174 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
15175 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15177 if(requiresDedicatedAllocation)
15181 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
15182 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15184 if(createInfo.
pool != VK_NULL_HANDLE)
15186 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
15187 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15190 if((createInfo.
pool != VK_NULL_HANDLE) &&
15193 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
15194 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15197 if(createInfo.
pool != VK_NULL_HANDLE)
15199 const VkDeviceSize alignmentForPool = VMA_MAX(
15200 vkMemReq.alignment,
15201 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
15206 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15211 return createInfo.
pool->m_BlockVector.Allocate(
15212 m_CurrentFrameIndex.load(),
15223 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
15224 uint32_t memTypeIndex = UINT32_MAX;
15226 if(res == VK_SUCCESS)
15228 VkDeviceSize alignmentForMemType = VMA_MAX(
15229 vkMemReq.alignment,
15230 GetMemoryTypeMinAlignment(memTypeIndex));
15232 res = AllocateMemoryOfType(
15234 alignmentForMemType,
15235 requiresDedicatedAllocation || prefersDedicatedAllocation,
15244 if(res == VK_SUCCESS)
15254 memoryTypeBits &= ~(1u << memTypeIndex);
15257 if(res == VK_SUCCESS)
15259 alignmentForMemType = VMA_MAX(
15260 vkMemReq.alignment,
15261 GetMemoryTypeMinAlignment(memTypeIndex));
15263 res = AllocateMemoryOfType(
15265 alignmentForMemType,
15266 requiresDedicatedAllocation || prefersDedicatedAllocation,
15275 if(res == VK_SUCCESS)
15285 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15296 void VmaAllocator_T::FreeMemory(
15297 size_t allocationCount,
15300 VMA_ASSERT(pAllocations);
15302 for(
size_t allocIndex = allocationCount; allocIndex--; )
15306 if(allocation != VK_NULL_HANDLE)
15308 if(TouchAllocation(allocation))
15310 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
15312 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
15315 switch(allocation->GetType())
15317 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15319 VmaBlockVector* pBlockVector = VMA_NULL;
15320 VmaPool hPool = allocation->GetBlock()->GetParentPool();
15321 if(hPool != VK_NULL_HANDLE)
15323 pBlockVector = &hPool->m_BlockVector;
15327 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15328 pBlockVector = m_pBlockVectors[memTypeIndex];
15330 pBlockVector->Free(allocation);
15333 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15334 FreeDedicatedMemory(allocation);
15342 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
15343 allocation->SetUserData(
this, VMA_NULL);
15344 allocation->Dtor();
15345 m_AllocationObjectAllocator.Free(allocation);
15350 VkResult VmaAllocator_T::ResizeAllocation(
15352 VkDeviceSize newSize)
15355 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
15357 return VK_ERROR_VALIDATION_FAILED_EXT;
15359 if(newSize == alloc->GetSize())
15363 return VK_ERROR_OUT_OF_POOL_MEMORY;
15366 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
15369 InitStatInfo(pStats->
total);
15370 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
15372 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
15376 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15378 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15379 VMA_ASSERT(pBlockVector);
15380 pBlockVector->AddStats(pStats);
15385 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15386 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15388 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
15393 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15395 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
15396 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15397 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15398 VMA_ASSERT(pDedicatedAllocVector);
15399 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
15402 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
15403 VmaAddStatInfo(pStats->
total, allocationStatInfo);
15404 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
15405 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
15410 VmaPostprocessCalcStatInfo(pStats->
total);
15411 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
15412 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
15413 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
15414 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
15417 void VmaAllocator_T::GetBudget(
VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
15419 #if VMA_MEMORY_BUDGET
15420 if(m_UseExtMemoryBudget)
15422 if(m_Budget.m_OperationsSinceBudgetFetch < 30)
15424 VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
15425 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
15427 const uint32_t heapIndex = firstHeap + i;
15429 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
15432 if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->
blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
15434 outBudget->
usage = m_Budget.m_VulkanUsage[heapIndex] +
15435 outBudget->
blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
15439 outBudget->
usage = 0;
15443 outBudget->
budget = VMA_MIN(
15444 m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
15449 UpdateVulkanBudget();
15450 GetBudget(outBudget, firstHeap, heapCount);
15456 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
15458 const uint32_t heapIndex = firstHeap + i;
15460 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
15464 outBudget->
budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
15469 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
15471 VkResult VmaAllocator_T::DefragmentationBegin(
15481 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
15482 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
15485 (*pContext)->AddAllocations(
15488 VkResult res = (*pContext)->Defragment(
15493 if(res != VK_NOT_READY)
15495 vma_delete(
this, *pContext);
15496 *pContext = VMA_NULL;
15502 VkResult VmaAllocator_T::DefragmentationEnd(
15505 vma_delete(
this, context);
15511 if(hAllocation->CanBecomeLost())
15517 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15518 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15521 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15525 pAllocationInfo->
offset = 0;
15526 pAllocationInfo->
size = hAllocation->GetSize();
15528 pAllocationInfo->
pUserData = hAllocation->GetUserData();
15531 else if(localLastUseFrameIndex == localCurrFrameIndex)
15533 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15534 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15535 pAllocationInfo->
offset = hAllocation->GetOffset();
15536 pAllocationInfo->
size = hAllocation->GetSize();
15538 pAllocationInfo->
pUserData = hAllocation->GetUserData();
15543 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15545 localLastUseFrameIndex = localCurrFrameIndex;
15552 #if VMA_STATS_STRING_ENABLED
15553 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15554 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15557 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15558 if(localLastUseFrameIndex == localCurrFrameIndex)
15564 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15566 localLastUseFrameIndex = localCurrFrameIndex;
15572 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15573 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15574 pAllocationInfo->
offset = hAllocation->GetOffset();
15575 pAllocationInfo->
size = hAllocation->GetSize();
15576 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15577 pAllocationInfo->
pUserData = hAllocation->GetUserData();
15581 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
15584 if(hAllocation->CanBecomeLost())
15586 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15587 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15590 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15594 else if(localLastUseFrameIndex == localCurrFrameIndex)
15600 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15602 localLastUseFrameIndex = localCurrFrameIndex;
15609 #if VMA_STATS_STRING_ENABLED
15610 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15611 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15614 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15615 if(localLastUseFrameIndex == localCurrFrameIndex)
15621 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15623 localLastUseFrameIndex = localCurrFrameIndex;
15635 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
15645 return VK_ERROR_INITIALIZATION_FAILED;
15648 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15650 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
15652 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
15653 if(res != VK_SUCCESS)
15655 vma_delete(
this, *pPool);
15662 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15663 (*pPool)->SetId(m_NextPoolId++);
15664 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
15670 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15674 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15675 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15676 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15679 vma_delete(
this, pool);
15684 pool->m_BlockVector.GetPoolStats(pPoolStats);
15687 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15689 m_CurrentFrameIndex.store(frameIndex);
15691 #if VMA_MEMORY_BUDGET
15692 if(m_UseExtMemoryBudget)
15694 UpdateVulkanBudget();
15696 #endif // #if VMA_MEMORY_BUDGET
15699 void VmaAllocator_T::MakePoolAllocationsLost(
15701 size_t* pLostAllocationCount)
15703 hPool->m_BlockVector.MakePoolAllocationsLost(
15704 m_CurrentFrameIndex.load(),
15705 pLostAllocationCount);
15708 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15710 return hPool->m_BlockVector.CheckCorruption();
15713 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15715 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
15718 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15720 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15722 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15723 VMA_ASSERT(pBlockVector);
15724 VkResult localRes = pBlockVector->CheckCorruption();
15727 case VK_ERROR_FEATURE_NOT_PRESENT:
15730 finalRes = VK_SUCCESS;
15740 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15741 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15743 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15745 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15748 case VK_ERROR_FEATURE_NOT_PRESENT:
15751 finalRes = VK_SUCCESS;
15763 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15765 *pAllocation = m_AllocationObjectAllocator.Allocate();
15766 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST,
false);
15767 (*pAllocation)->InitLost();
15770 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15772 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
15775 if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
15777 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
15778 VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
15781 const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
15782 if(blockBytesAfterAllocation > heapSize)
15784 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15786 if(m_Budget.m_BlockBytes->compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
15794 m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
15798 VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15800 if(res == VK_SUCCESS)
15802 #if VMA_MEMORY_BUDGET
15803 ++m_Budget.m_OperationsSinceBudgetFetch;
15807 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15809 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
15814 m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
15820 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15823 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15825 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15829 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
15831 m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
15834 VkResult VmaAllocator_T::BindVulkanBuffer(
15835 VkDeviceMemory memory,
15836 VkDeviceSize memoryOffset,
15840 if(pNext != VMA_NULL)
15842 #if VMA_BIND_MEMORY2
15843 if(m_UseKhrBindMemory2 && m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
15845 VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
15846 bindBufferMemoryInfo.pNext = pNext;
15847 bindBufferMemoryInfo.buffer = buffer;
15848 bindBufferMemoryInfo.memory = memory;
15849 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15850 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
15853 #endif // #if VMA_BIND_MEMORY2
15855 return VK_ERROR_EXTENSION_NOT_PRESENT;
15860 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
15864 VkResult VmaAllocator_T::BindVulkanImage(
15865 VkDeviceMemory memory,
15866 VkDeviceSize memoryOffset,
15870 if(pNext != VMA_NULL)
15872 #if VMA_BIND_MEMORY2
15873 if(m_UseKhrBindMemory2 && m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
15875 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
15876 bindBufferMemoryInfo.pNext = pNext;
15877 bindBufferMemoryInfo.image = image;
15878 bindBufferMemoryInfo.memory = memory;
15879 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15880 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
15883 #endif // #if VMA_BIND_MEMORY2
15885 return VK_ERROR_EXTENSION_NOT_PRESENT;
15890 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
15894 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15896 if(hAllocation->CanBecomeLost())
15898 return VK_ERROR_MEMORY_MAP_FAILED;
15901 switch(hAllocation->GetType())
15903 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15905 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15906 char *pBytes = VMA_NULL;
15907 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15908 if(res == VK_SUCCESS)
15910 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15911 hAllocation->BlockAllocMap();
15915 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15916 return hAllocation->DedicatedAllocMap(
this, ppData);
15919 return VK_ERROR_MEMORY_MAP_FAILED;
15925 switch(hAllocation->GetType())
15927 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15929 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15930 hAllocation->BlockAllocUnmap();
15931 pBlock->Unmap(
this, 1);
15934 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15935 hAllocation->DedicatedAllocUnmap(
this);
15942 VkResult VmaAllocator_T::BindBufferMemory(
15944 VkDeviceSize allocationLocalOffset,
15948 VkResult res = VK_SUCCESS;
15949 switch(hAllocation->GetType())
15951 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15952 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
15954 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15956 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15957 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15958 res = pBlock->BindBufferMemory(
this, hAllocation, allocationLocalOffset, hBuffer, pNext);
15967 VkResult VmaAllocator_T::BindImageMemory(
15969 VkDeviceSize allocationLocalOffset,
15973 VkResult res = VK_SUCCESS;
15974 switch(hAllocation->GetType())
15976 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15977 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
15979 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15981 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15982 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15983 res = pBlock->BindImageMemory(
this, hAllocation, allocationLocalOffset, hImage, pNext);
15992 void VmaAllocator_T::FlushOrInvalidateAllocation(
15994 VkDeviceSize offset, VkDeviceSize size,
15995 VMA_CACHE_OPERATION op)
15997 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
15998 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
16000 const VkDeviceSize allocationSize = hAllocation->GetSize();
16001 VMA_ASSERT(offset <= allocationSize);
16003 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
16005 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
16006 memRange.memory = hAllocation->GetMemory();
16008 switch(hAllocation->GetType())
16010 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
16011 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
16012 if(size == VK_WHOLE_SIZE)
16014 memRange.size = allocationSize - memRange.offset;
16018 VMA_ASSERT(offset + size <= allocationSize);
16019 memRange.size = VMA_MIN(
16020 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
16021 allocationSize - memRange.offset);
16025 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
16028 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
16029 if(size == VK_WHOLE_SIZE)
16031 size = allocationSize - offset;
16035 VMA_ASSERT(offset + size <= allocationSize);
16037 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
16040 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
16041 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
16042 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
16043 memRange.offset += allocationOffset;
16044 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
16055 case VMA_CACHE_FLUSH:
16056 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
16058 case VMA_CACHE_INVALIDATE:
16059 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
16068 void VmaAllocator_T::FreeDedicatedMemory(
const VmaAllocation allocation)
16070 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
16072 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
16074 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
16075 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
16076 VMA_ASSERT(pDedicatedAllocations);
16077 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
16078 VMA_ASSERT(success);
16081 VkDeviceMemory hMemory = allocation->GetMemory();
16093 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
16095 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
16098 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const
16100 VkBufferCreateInfo dummyBufCreateInfo;
16101 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
16103 uint32_t memoryTypeBits = 0;
16106 VkBuffer buf = VK_NULL_HANDLE;
16107 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
16108 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
16109 if(res == VK_SUCCESS)
16112 VkMemoryRequirements memReq;
16113 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
16114 memoryTypeBits = memReq.memoryTypeBits;
16117 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
16120 return memoryTypeBits;
16123 #if VMA_MEMORY_BUDGET
16125 void VmaAllocator_T::UpdateVulkanBudget()
16127 VMA_ASSERT(m_UseExtMemoryBudget);
16129 VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };
16131 VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
16132 memProps.pNext = &budgetProps;
16134 GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
16137 VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
16139 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
16141 m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
16142 m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
16143 m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
16145 m_Budget.m_OperationsSinceBudgetFetch = 0;
16149 #endif // #if VMA_MEMORY_BUDGET
16151 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
16153 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
16154 !hAllocation->CanBecomeLost() &&
16155 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
16157 void* pData = VMA_NULL;
16158 VkResult res = Map(hAllocation, &pData);
16159 if(res == VK_SUCCESS)
16161 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
16162 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
16163 Unmap(hAllocation);
16167 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
16172 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
16174 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
16175 if(memoryTypeBits == UINT32_MAX)
16177 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
16178 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
16180 return memoryTypeBits;
16183 #if VMA_STATS_STRING_ENABLED
16185 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
16187 bool dedicatedAllocationsStarted =
false;
16188 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16190 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
16191 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
16192 VMA_ASSERT(pDedicatedAllocVector);
16193 if(pDedicatedAllocVector->empty() ==
false)
16195 if(dedicatedAllocationsStarted ==
false)
16197 dedicatedAllocationsStarted =
true;
16198 json.WriteString(
"DedicatedAllocations");
16199 json.BeginObject();
16202 json.BeginString(
"Type ");
16203 json.ContinueString(memTypeIndex);
16208 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
16210 json.BeginObject(
true);
16212 hAlloc->PrintParameters(json);
16219 if(dedicatedAllocationsStarted)
16225 bool allocationsStarted =
false;
16226 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16228 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
16230 if(allocationsStarted ==
false)
16232 allocationsStarted =
true;
16233 json.WriteString(
"DefaultPools");
16234 json.BeginObject();
16237 json.BeginString(
"Type ");
16238 json.ContinueString(memTypeIndex);
16241 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
16244 if(allocationsStarted)
16252 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
16253 const size_t poolCount = m_Pools.size();
16256 json.WriteString(
"Pools");
16257 json.BeginObject();
16258 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
16260 json.BeginString();
16261 json.ContinueString(m_Pools[poolIndex]->GetId());
16264 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
16271 #endif // #if VMA_STATS_STRING_ENABLED
16280 VMA_ASSERT(pCreateInfo && pAllocator);
16281 VMA_DEBUG_LOG(
"vmaCreateAllocator");
16283 return (*pAllocator)->Init(pCreateInfo);
16289 if(allocator != VK_NULL_HANDLE)
16291 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
16292 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
16293 vma_delete(&allocationCallbacks, allocator);
16299 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
16301 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
16302 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
16307 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
16309 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
16310 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
16315 uint32_t memoryTypeIndex,
16316 VkMemoryPropertyFlags* pFlags)
16318 VMA_ASSERT(allocator && pFlags);
16319 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
16320 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
16325 uint32_t frameIndex)
16327 VMA_ASSERT(allocator);
16328 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
16330 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16332 allocator->SetCurrentFrameIndex(frameIndex);
16339 VMA_ASSERT(allocator && pStats);
16340 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16341 allocator->CalculateStats(pStats);
16348 VMA_ASSERT(allocator && pBudget);
16349 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16350 allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
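/* Illustrative sketch: dumping allocator statistics as a JSON string (assumes
   `allocator` is a valid VmaAllocator). The string must be released through
   the same allocator:

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = detailed map
    // ... log statsString or write it to a file ...
    vmaFreeStatsString(allocator, statsString);
*/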
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains all requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Cost: preferred flags missing from this type + not-preferred flags present in it.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
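/* Illustrative sketch: picking a memory type for a staging buffer. The bitmask
   would normally come from vkGetBufferMemoryRequirements(); UINT32_MAX here
   simply means "any memory type is acceptable":

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndex(allocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
    // On success, memTypeIndex is the cheapest type that has every requiredFlags
    // bit and as many preferredFlags bits as possible for the given usage.
*/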
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
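/* Illustrative sketch: the ForBufferInfo/ForImageInfo variants above create a
   temporary VkBuffer/VkImage only to query its memory requirements, so the
   returned index honors the real memoryTypeBits of such a resource:

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);
*/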
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
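/* Illustrative sketch: creating a custom pool for a particular memory type
   (`memTypeIndex` e.g. found with vmaFindMemoryTypeIndexForBufferInfo above):

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 64ull * 1024 * 1024; // Optional: fixed 64 MiB blocks.

    VmaPool pool = VK_NULL_HANDLE;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
    // Allocations are routed into it via VmaAllocationCreateInfo::pool.
    // Later, after all its allocations have been freed:
    vmaDestroyPool(allocator, pool);
*/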
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
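/* Illustrative sketch: vmaAllocateMemoryPages() makes `allocationCount`
   independent allocations in one call, which is cheaper than calling
   vmaAllocateMemory() in a loop. The requirements here are hand-written for
   brevity; normally they come from a real resource:

    VkMemoryRequirements memReq = {};
    memReq.size = 65536;
    memReq.alignment = 256;
    memReq.memoryTypeBits = UINT32_MAX;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocation allocs[8] = {};
    VkResult res = vmaAllocateMemoryPages(allocator, &memReq, &allocCreateInfo, 8, allocs, VMA_NULL);
    // ...
    vmaFreeMemoryPages(allocator, 8, allocs);
*/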
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
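/* Illustrative sketch: mapping is reference-counted internally, so nested
   vmaMapMemory()/vmaUnmapMemory() pairs on the same allocation are legal.
   `srcData` and `srcDataSize` are assumed application-side names:

    void* mappedData = VMA_NULL;
    VkResult res = vmaMapMemory(allocator, allocation, &mappedData);
    if(res == VK_SUCCESS)
    {
        memcpy(mappedData, srcData, srcDataSize);
        vmaUnmapMemory(allocator, allocation);
    }
*/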
VMA_CALL_PRE void VMA_CALL_POST vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}
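/* Illustrative sketch: on memory types without HOST_COHERENT, a CPU write must
   be followed by a flush (and a CPU read preceded by an invalidate). Both calls
   become no-ops on coherent memory, so calling them unconditionally is safe:

    memcpy(mappedData, srcData, srcDataSize);
    vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);
*/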
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using the new one.

    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // GPU-side limits and commandBuffer are deliberately left zero in the legacy path.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}
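/* Illustrative sketch of the CPU-side defragmentation flow (assumes `allocs`
   holds `allocCount` allocations that the GPU is not currently using):

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = allocCount;
    defragInfo.pAllocations = allocs;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
    vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
    vmaDefragmentationEnd(allocator, defragCtx);
    // Buffers/images bound to moved allocations must afterwards be recreated
    // and rebound by the application.
*/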
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in alignment reported by memory requirements.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
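/* Illustrative sketch: the typical way to create a buffer together with its
   memory in one call, then destroy both together:

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
        &buffer, &allocation, VMA_NULL);
    // ...
    vmaDestroyBuffer(allocator, buffer, allocation);
*/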
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
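/* Illustrative sketch: creating a 2D texture image together with its memory:

    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage image = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo,
        &image, &allocation, VMA_NULL);
    // ...
    vmaDestroyImage(allocator, image, allocation);
*/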
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION