Vulkan Memory Allocator
vk_mem_alloc.h
Go to the documentation of this file.
1 //
2 // Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved.
3 //
4 // Permission is hereby granted, free of charge, to any person obtaining a copy
5 // of this software and associated documentation files (the "Software"), to deal
6 // in the Software without restriction, including without limitation the rights
7 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8 // copies of the Software, and to permit persons to whom the Software is
9 // furnished to do so, subject to the following conditions:
10 //
11 // The above copyright notice and this permission notice shall be included in
12 // all copies or substantial portions of the Software.
13 //
14 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
20 // THE SOFTWARE.
21 //
22 
23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
25 
26 #ifdef __cplusplus
27 extern "C" {
28 #endif
29 
1082 #include <vulkan/vulkan.h>
1083 
1084 #if !defined(VMA_DEDICATED_ALLOCATION)
1085  #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
1086  #define VMA_DEDICATED_ALLOCATION 1
1087  #else
1088  #define VMA_DEDICATED_ALLOCATION 0
1089  #endif
1090 #endif
1091 
1101 VK_DEFINE_HANDLE(VmaAllocator)
1102 
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
1115 
1129 
1159 
1162 typedef VkFlags VmaAllocatorCreateFlags;
1163 
1168 typedef struct VmaVulkanFunctions {
1169  PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
1170  PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
1171  PFN_vkAllocateMemory vkAllocateMemory;
1172  PFN_vkFreeMemory vkFreeMemory;
1173  PFN_vkMapMemory vkMapMemory;
1174  PFN_vkUnmapMemory vkUnmapMemory;
1175  PFN_vkBindBufferMemory vkBindBufferMemory;
1176  PFN_vkBindImageMemory vkBindImageMemory;
1177  PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
1178  PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
1179  PFN_vkCreateBuffer vkCreateBuffer;
1180  PFN_vkDestroyBuffer vkDestroyBuffer;
1181  PFN_vkCreateImage vkCreateImage;
1182  PFN_vkDestroyImage vkDestroyImage;
1183 #if VMA_DEDICATED_ALLOCATION
1184  PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1185  PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1186 #endif
1188 
1191 {
1193  VmaAllocatorCreateFlags flags;
1195 
1196  VkPhysicalDevice physicalDevice;
1198 
1199  VkDevice device;
1201 
1204 
1205  const VkAllocationCallbacks* pAllocationCallbacks;
1207 
1246  const VkDeviceSize* pHeapSizeLimit;
1260 
/// Creates #VmaAllocator object according to *pCreateInfo and writes the handle to *pAllocator.
VkResult vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator);

/// Destroys allocator object previously created with vmaCreateAllocator().
void vmaDestroyAllocator(
    VmaAllocator allocator);
1269 
1275  VmaAllocator allocator,
1276  const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1277 
1283  VmaAllocator allocator,
1284  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1285 
1293  VmaAllocator allocator,
1294  uint32_t memoryTypeIndex,
1295  VkMemoryPropertyFlags* pFlags);
1296 
1306  VmaAllocator allocator,
1307  uint32_t frameIndex);
1308 
1311 typedef struct VmaStatInfo
1312 {
1314  uint32_t blockCount;
1320  VkDeviceSize usedBytes;
1322  VkDeviceSize unusedBytes;
1323  VkDeviceSize allocationSizeMin, allocationSizeAvg, allocationSizeMax;
1324  VkDeviceSize unusedRangeSizeMin, unusedRangeSizeAvg, unusedRangeSizeMax;
1325 } VmaStatInfo;
1326 
1328 typedef struct VmaStats
1329 {
1330  VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES];
1331  VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS];
1333 } VmaStats;
1334 
/// Retrieves statistics from the current state of the allocator into *pStats.
void vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats);
1339 
1340 #define VMA_STATS_STRING_ENABLED 1
1341 
1342 #if VMA_STATS_STRING_ENABLED
1343 
1345 
/// Builds and returns a statistics string in *ppStatsString.
/// The returned string must be released with vmaFreeStatsString().
void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

/// Frees a string returned by vmaBuildStatsString().
void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);
1355 
1356 #endif // #if VMA_STATS_STRING_ENABLED
1357 
1366 VK_DEFINE_HANDLE(VmaPool)
1367 
1368 typedef enum VmaMemoryUsage
1369 {
1418 } VmaMemoryUsage;
1419 
1434 
1484 
1488 
1490 {
1492  VmaAllocationCreateFlags flags;
1503  VkMemoryPropertyFlags requiredFlags;
1508  VkMemoryPropertyFlags preferredFlags;
1516  uint32_t memoryTypeBits;
1529  void* pUserData;
1531 
/**
Helps to find memory type index, given memoryTypeBits and desired allocation
parameters. Writes the chosen index to *pMemoryTypeIndex.
*/
VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);
1553 
1567  VmaAllocator allocator,
1568  const VkBufferCreateInfo* pBufferCreateInfo,
1569  const VmaAllocationCreateInfo* pAllocationCreateInfo,
1570  uint32_t* pMemoryTypeIndex);
1571 
1585  VmaAllocator allocator,
1586  const VkImageCreateInfo* pImageCreateInfo,
1587  const VmaAllocationCreateInfo* pAllocationCreateInfo,
1588  uint32_t* pMemoryTypeIndex);
1589 
1610 
1613 typedef VkFlags VmaPoolCreateFlags;
1614 
1617 typedef struct VmaPoolCreateInfo {
1623  VmaPoolCreateFlags flags;
1628  VkDeviceSize blockSize;
1657 
1660 typedef struct VmaPoolStats {
1663  VkDeviceSize size;
1666  VkDeviceSize unusedSize;
1679  VkDeviceSize unusedRangeSizeMax;
1680 } VmaPoolStats;
1681 
/// Creates #VmaPool object according to *pCreateInfo and writes the handle to *pPool.
VkResult vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool);

/// Destroys #VmaPool object previously created with vmaCreatePool().
void vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool);

/// Retrieves statistics of existing #VmaPool object into *pPoolStats.
void vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats);
1709 
1717  VmaAllocator allocator,
1718  VmaPool pool,
1719  size_t* pLostAllocationCount);
1720 
1745 VK_DEFINE_HANDLE(VmaAllocation)
1746 
1747 
1749 typedef struct VmaAllocationInfo {
1754  uint32_t memoryType;
1763  VkDeviceMemory deviceMemory;
1768  VkDeviceSize offset;
1773  VkDeviceSize size;
1787  void* pUserData;
1789 
/**
General purpose memory allocation; requirements are given explicitly in
*pVkMemoryRequirements. Writes the allocation handle to *pAllocation.
pAllocationInfo receives information about the allocated memory.
Free the memory using vmaFreeMemory().
*/
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
1806 
1814  VmaAllocator allocator,
1815  VkBuffer buffer,
1816  const VmaAllocationCreateInfo* pCreateInfo,
1817  VmaAllocation* pAllocation,
1818  VmaAllocationInfo* pAllocationInfo);
1819 
/// Allocates memory suitable for given `VkImage`; analogous to vmaAllocateMemory().
VkResult vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Frees memory previously allocated with one of the vmaAllocateMemory* functions.
void vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation);
1832 
1850  VmaAllocator allocator,
1851  VmaAllocation allocation,
1852  VmaAllocationInfo* pAllocationInfo);
1853 
/// Touches given allocation and returns VK_TRUE if it is valid.
/// NOTE(review): exact lost-allocation semantics live in the implementation — confirm there.
VkBool32 vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation);
1871 
1886  VmaAllocator allocator,
1887  VmaAllocation allocation,
1888  void* pUserData);
1889 
1901  VmaAllocator allocator,
1902  VmaAllocation* pAllocation);
1903 
/// Maps memory represented by given allocation and writes the host pointer to *ppData.
VkResult vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData);

/// Unmaps memory previously mapped with vmaMapMemory().
void vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation);
1950 
1952 typedef struct VmaDefragmentationInfo {
1957  VkDeviceSize maxBytesToMove;
1964 
1966 typedef struct VmaDefragmentationStats {
1968  VkDeviceSize bytesMoved;
1970  VkDeviceSize bytesFreed;
1976 
/// Compacts memory by moving allocations. pAllocationsChanged (if not null)
/// receives per-allocation flags; pDefragmentationStats (if not null) receives
/// overall statistics.
VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo *pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

/// Binds buffer to the memory of given allocation; analogous to vkBindBufferMemory.
VkResult vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer);

/// Binds image to the memory of given allocation; analogous to vkBindImageMemory.
VkResult vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image);
2100 
/// Creates a buffer, allocates memory for it and binds them together.
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Destroys buffer and frees its memory; counterpart of vmaCreateBuffer().
void vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation);

/// Creates an image, allocates memory for it and binds them together; analogous to vmaCreateBuffer().
VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Destroys image and frees its memory; counterpart of vmaCreateImage().
void vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation);
2175 
2176 #ifdef __cplusplus
2177 }
2178 #endif
2179 
2180 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
2181 
2182 // For Visual Studio IntelliSense.
2183 #ifdef __INTELLISENSE__
2184 #define VMA_IMPLEMENTATION
2185 #endif
2186 
2187 #ifdef VMA_IMPLEMENTATION
2188 #undef VMA_IMPLEMENTATION
2189 
2190 #include <cstdint>
2191 #include <cstdlib>
2192 #include <cstring>
2193 
2194 /*******************************************************************************
2195 CONFIGURATION SECTION
2196 
Define some of these macros before each #include of this header or change them
here if you need other than default behavior depending on your environment.
*/
2200 
2201 /*
2202 Define this macro to 1 to make the library fetch pointers to Vulkan functions
2203 internally, like:
2204 
2205  vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
2206 
Define to 0 if you are going to provide your own pointers to Vulkan functions via
VmaAllocatorCreateInfo::pVulkanFunctions.
2209 */
2210 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
2211 #define VMA_STATIC_VULKAN_FUNCTIONS 1
2212 #endif
2213 
2214 // Define this macro to 1 to make the library use STL containers instead of its own implementation.
2215 //#define VMA_USE_STL_CONTAINERS 1
2216 
/* Set this macro to 1 to make the library include and use STL containers:
std::pair, std::vector, std::list, std::unordered_map.

Set it to 0 or leave it undefined to make the library use its own implementation
of the containers.
*/
2223 #if VMA_USE_STL_CONTAINERS
2224  #define VMA_USE_STL_VECTOR 1
2225  #define VMA_USE_STL_UNORDERED_MAP 1
2226  #define VMA_USE_STL_LIST 1
2227 #endif
2228 
2229 #if VMA_USE_STL_VECTOR
2230  #include <vector>
2231 #endif
2232 
2233 #if VMA_USE_STL_UNORDERED_MAP
2234  #include <unordered_map>
2235 #endif
2236 
2237 #if VMA_USE_STL_LIST
2238  #include <list>
2239 #endif
2240 
2241 /*
2242 Following headers are used in this CONFIGURATION section only, so feel free to
2243 remove them if not needed.
2244 */
2245 #include <cassert> // for assert
2246 #include <algorithm> // for min, max
2247 #include <mutex> // for std::mutex
2248 #include <atomic> // for std::atomic
2249 
2250 #if !defined(_WIN32) && !defined(__APPLE__)
2251  #include <malloc.h> // for aligned_alloc()
2252 #endif
2253 
2254 #ifndef VMA_NULL
2255  // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
2256  #define VMA_NULL nullptr
2257 #endif
2258 
2259 #if defined(__APPLE__) || defined(__ANDROID__)
2260 #include <cstdlib>
2261 void *aligned_alloc(size_t alignment, size_t size)
2262 {
2263  // alignment must be >= sizeof(void*)
2264  if(alignment < sizeof(void*))
2265  {
2266  alignment = sizeof(void*);
2267  }
2268 
2269  void *pointer;
2270  if(posix_memalign(&pointer, alignment, size) == 0)
2271  return pointer;
2272  return VMA_NULL;
2273 }
2274 #endif
2275 
2276 // Normal assert to check for programmer's errors, especially in Debug configuration.
2277 #ifndef VMA_ASSERT
2278  #ifdef _DEBUG
2279  #define VMA_ASSERT(expr) assert(expr)
2280  #else
2281  #define VMA_ASSERT(expr)
2282  #endif
2283 #endif
2284 
2285 // Assert that will be called very often, like inside data structures e.g. operator[].
2286 // Making it non-empty can make program slow.
2287 #ifndef VMA_HEAVY_ASSERT
2288  #ifdef _DEBUG
2289  #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
2290  #else
2291  #define VMA_HEAVY_ASSERT(expr)
2292  #endif
2293 #endif
2294 
2295 #ifndef VMA_ALIGN_OF
2296  #define VMA_ALIGN_OF(type) (__alignof(type))
2297 #endif
2298 
2299 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
2300  #if defined(_WIN32)
2301  #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
2302  #else
2303  #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
2304  #endif
2305 #endif
2306 
2307 #ifndef VMA_SYSTEM_FREE
2308  #if defined(_WIN32)
2309  #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
2310  #else
2311  #define VMA_SYSTEM_FREE(ptr) free(ptr)
2312  #endif
2313 #endif
2314 
2315 #ifndef VMA_MIN
2316  #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
2317 #endif
2318 
2319 #ifndef VMA_MAX
2320  #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
2321 #endif
2322 
2323 #ifndef VMA_SWAP
2324  #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
2325 #endif
2326 
2327 #ifndef VMA_SORT
2328  #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
2329 #endif
2330 
2331 #ifndef VMA_DEBUG_LOG
2332  #define VMA_DEBUG_LOG(format, ...)
2333  /*
2334  #define VMA_DEBUG_LOG(format, ...) do { \
2335  printf(format, __VA_ARGS__); \
2336  printf("\n"); \
2337  } while(false)
2338  */
2339 #endif
2340 
2341 // Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
2342 #if VMA_STATS_STRING_ENABLED
2343  static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
2344  {
2345  snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
2346  }
2347  static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
2348  {
2349  snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
2350  }
2351  static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
2352  {
2353  snprintf(outStr, strLen, "%p", ptr);
2354  }
2355 #endif
2356 
2357 #ifndef VMA_MUTEX
2358  class VmaMutex
2359  {
2360  public:
2361  VmaMutex() { }
2362  ~VmaMutex() { }
2363  void Lock() { m_Mutex.lock(); }
2364  void Unlock() { m_Mutex.unlock(); }
2365  private:
2366  std::mutex m_Mutex;
2367  };
2368  #define VMA_MUTEX VmaMutex
2369 #endif
2370 
2371 /*
2372 If providing your own implementation, you need to implement a subset of std::atomic:
2373 
2374 - Constructor(uint32_t desired)
2375 - uint32_t load() const
2376 - void store(uint32_t desired)
2377 - bool compare_exchange_weak(uint32_t& expected, uint32_t desired)
2378 */
2379 #ifndef VMA_ATOMIC_UINT32
2380  #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
2381 #endif
2382 
2383 #ifndef VMA_BEST_FIT
2384 
2396  #define VMA_BEST_FIT (1)
2397 #endif
2398 
2399 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
2400 
2404  #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
2405 #endif
2406 
2407 #ifndef VMA_DEBUG_ALIGNMENT
2408 
2412  #define VMA_DEBUG_ALIGNMENT (1)
2413 #endif
2414 
2415 #ifndef VMA_DEBUG_MARGIN
2416 
2420  #define VMA_DEBUG_MARGIN (0)
2421 #endif
2422 
2423 #ifndef VMA_DEBUG_GLOBAL_MUTEX
2424 
2428  #define VMA_DEBUG_GLOBAL_MUTEX (0)
2429 #endif
2430 
2431 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
2432 
2436  #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
2437 #endif
2438 
2439 #ifndef VMA_SMALL_HEAP_MAX_SIZE
2440  #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
2442 #endif
2443 
2444 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
2445  #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
2447 #endif
2448 
2449 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2450 
2451 /*******************************************************************************
2452 END OF CONFIGURATION
2453 */
2454 
// VkAllocationCallbacks structure with all fields set to null.
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
2457 
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    // Kernighan's method: each iteration clears the lowest set bit.
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1u;
        ++count;
    }
    return count;
}
2468 
// Aligns given value up to nearest multiple of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;
    return bumped - bumped % align;
}
2476 
// Division with mathematical rounding to nearest number.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
2483 
2484 #ifndef VMA_SORT
2485 
// Lomuto-style partition around the last element of [beg, end);
// returns the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator storePos = beg;
    for(Iterator cur = beg; cur < pivot; ++cur)
    {
        if(cmp(*cur, *pivot))
        {
            if(storePos != cur)
            {
                VMA_SWAP(*cur, *storePos);
            }
            ++storePos;
        }
    }
    if(storePos != pivot)
    {
        VMA_SWAP(*storePos, *pivot);
    }
    return storePos;
}
2508 
2509 template<typename Iterator, typename Compare>
2510 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2511 {
2512  if(beg < end)
2513  {
2514  Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2515  VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2516  VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2517  }
2518 }
2519 
2520 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
2521 
2522 #endif // #ifndef VMA_SORT
2523 
2524 /*
2525 Returns true if two memory blocks occupy overlapping pages.
2526 ResourceA must be in less memory offset than ResourceB.
2527 
2528 Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
2529 chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
2530 */
2531 static inline bool VmaBlocksOnSamePage(
2532  VkDeviceSize resourceAOffset,
2533  VkDeviceSize resourceASize,
2534  VkDeviceSize resourceBOffset,
2535  VkDeviceSize pageSize)
2536 {
2537  VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2538  VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2539  VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2540  VkDeviceSize resourceBStart = resourceBOffset;
2541  VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2542  return resourceAEndPage == resourceBStartPage;
2543 }
2544 
// Kind of resource occupying a suballocation. Used to decide whether two
// neighboring suballocations conflict under bufferImageGranularity
// (see VmaIsBufferImageGranularityConflict).
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
2555 
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
or linear image and another one is optimal image. If type is unknown, behave
conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    // Normalize the order so only type1 <= type2 pairs need handling below.
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        // A free range conflicts with nothing.
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        // Unknown contents: assume the worst.
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        // Image of unknown tiling: conflicts with any image and with buffers' opposite.
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        // Optimal next to optimal never conflicts.
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
2596 
2597 // Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
2598 struct VmaMutexLock
2599 {
2600 public:
2601  VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
2602  m_pMutex(useMutex ? &mutex : VMA_NULL)
2603  {
2604  if(m_pMutex)
2605  {
2606  m_pMutex->Lock();
2607  }
2608  }
2609 
2610  ~VmaMutexLock()
2611  {
2612  if(m_pMutex)
2613  {
2614  m_pMutex->Unlock();
2615  }
2616  }
2617 
2618 private:
2619  VMA_MUTEX* m_pMutex;
2620 };
2621 
2622 #if VMA_DEBUG_GLOBAL_MUTEX
2623  static VMA_MUTEX gDebugGlobalMutex;
2624  #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
2625 #else
2626  #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
2627 #endif
2628 
2629 // Minimum size of a free suballocation to register it in the free suballocation collection.
2630 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
2631 
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection or place where
new element with value (key) should be inserted.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t lo = 0;
    size_t hi = (size_t)(end - beg);
    while(lo < hi)
    {
        // Overflow-safe midpoint.
        const size_t mid = lo + (hi - lo) / 2;
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1;
        }
        else
        {
            hi = mid;
        }
    }
    return beg + lo;
}
2659 
2661 // Memory allocation
2662 
2663 static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
2664 {
2665  if((pAllocationCallbacks != VMA_NULL) &&
2666  (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2667  {
2668  return (*pAllocationCallbacks->pfnAllocation)(
2669  pAllocationCallbacks->pUserData,
2670  size,
2671  alignment,
2672  VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2673  }
2674  else
2675  {
2676  return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2677  }
2678 }
2679 
2680 static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
2681 {
2682  if((pAllocationCallbacks != VMA_NULL) &&
2683  (pAllocationCallbacks->pfnFree != VMA_NULL))
2684  {
2685  (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2686  }
2687  else
2688  {
2689  VMA_SYSTEM_FREE(ptr);
2690  }
2691 }
2692 
// Allocates uninitialized memory for a single object of type T, respecting
// T's alignment, using given Vulkan allocation callbacks (may be null).
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}
2698 
// Allocates uninitialized memory for (count) objects of type T.
// NOTE(review): no overflow check on sizeof(T) * count — callers are internal
// and pass bounded sizes; confirm if exposed more widely.
template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}
2704 
// Placement-news an object of (type) in memory obtained via VmaAllocate.
#define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)

// Placement-news into memory sized for (count) objects obtained via VmaAllocateArray.
#define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
2708 
2709 template<typename T>
2710 static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2711 {
2712  ptr->~T();
2713  VmaFree(pAllocationCallbacks, ptr);
2714 }
2715 
2716 template<typename T>
2717 static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
2718 {
2719  if(ptr != VMA_NULL)
2720  {
2721  for(size_t i = count; i--; )
2722  {
2723  ptr[i].~T();
2724  }
2725  VmaFree(pAllocationCallbacks, ptr);
2726  }
2727 }
2728 
// STL-compatible allocator that routes all allocations through
// VkAllocationCallbacks (or the library's aligned system allocator when they
// are null). Used with the VMA_USE_STL_* containers.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    // Rebinding copy constructor, required by the Allocator concept.
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    // Allocators compare equal iff they use the same callbacks, so memory
    // allocated through one may be freed through the other.
    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
2756 
2757 #if VMA_USE_STL_VECTOR
2758 
2759 #define VmaVector std::vector
2760 
// Inserts (item) at position (index) of an std::vector.
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    const auto insertPos = vec.begin() + index;
    vec.insert(insertPos, item);
}
2766 
// Removes the element at position (index) from an std::vector.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    const auto erasePos = vec.begin() + index;
    vec.erase(erasePos);
}
2772 
2773 #else // #if VMA_USE_STL_VECTOR
2774 
2775 /* Class with interface compatible with subset of std::vector.
2776 T must be POD because constructors and destructors are not called and memcpy is
2777 used for these objects. */
2778 template<typename T, typename AllocatorT>
2779 class VmaVector
2780 {
2781 public:
2782  typedef T value_type;
2783 
2784  VmaVector(const AllocatorT& allocator) :
2785  m_Allocator(allocator),
2786  m_pArray(VMA_NULL),
2787  m_Count(0),
2788  m_Capacity(0)
2789  {
2790  }
2791 
2792  VmaVector(size_t count, const AllocatorT& allocator) :
2793  m_Allocator(allocator),
2794  m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2795  m_Count(count),
2796  m_Capacity(count)
2797  {
2798  }
2799 
2800  VmaVector(const VmaVector<T, AllocatorT>& src) :
2801  m_Allocator(src.m_Allocator),
2802  m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2803  m_Count(src.m_Count),
2804  m_Capacity(src.m_Count)
2805  {
2806  if(m_Count != 0)
2807  {
2808  memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
2809  }
2810  }
2811 
2812  ~VmaVector()
2813  {
2814  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2815  }
2816 
2817  VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
2818  {
2819  if(&rhs != this)
2820  {
2821  resize(rhs.m_Count);
2822  if(m_Count != 0)
2823  {
2824  memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
2825  }
2826  }
2827  return *this;
2828  }
2829 
2830  bool empty() const { return m_Count == 0; }
2831  size_t size() const { return m_Count; }
2832  T* data() { return m_pArray; }
2833  const T* data() const { return m_pArray; }
2834 
2835  T& operator[](size_t index)
2836  {
2837  VMA_HEAVY_ASSERT(index < m_Count);
2838  return m_pArray[index];
2839  }
2840  const T& operator[](size_t index) const
2841  {
2842  VMA_HEAVY_ASSERT(index < m_Count);
2843  return m_pArray[index];
2844  }
2845 
2846  T& front()
2847  {
2848  VMA_HEAVY_ASSERT(m_Count > 0);
2849  return m_pArray[0];
2850  }
2851  const T& front() const
2852  {
2853  VMA_HEAVY_ASSERT(m_Count > 0);
2854  return m_pArray[0];
2855  }
2856  T& back()
2857  {
2858  VMA_HEAVY_ASSERT(m_Count > 0);
2859  return m_pArray[m_Count - 1];
2860  }
2861  const T& back() const
2862  {
2863  VMA_HEAVY_ASSERT(m_Count > 0);
2864  return m_pArray[m_Count - 1];
2865  }
2866 
2867  void reserve(size_t newCapacity, bool freeMemory = false)
2868  {
2869  newCapacity = VMA_MAX(newCapacity, m_Count);
2870 
2871  if((newCapacity < m_Capacity) && !freeMemory)
2872  {
2873  newCapacity = m_Capacity;
2874  }
2875 
2876  if(newCapacity != m_Capacity)
2877  {
2878  T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2879  if(m_Count != 0)
2880  {
2881  memcpy(newArray, m_pArray, m_Count * sizeof(T));
2882  }
2883  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2884  m_Capacity = newCapacity;
2885  m_pArray = newArray;
2886  }
2887  }
2888 
2889  void resize(size_t newCount, bool freeMemory = false)
2890  {
2891  size_t newCapacity = m_Capacity;
2892  if(newCount > m_Capacity)
2893  {
2894  newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
2895  }
2896  else if(freeMemory)
2897  {
2898  newCapacity = newCount;
2899  }
2900 
2901  if(newCapacity != m_Capacity)
2902  {
2903  T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2904  const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2905  if(elementsToCopy != 0)
2906  {
2907  memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
2908  }
2909  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2910  m_Capacity = newCapacity;
2911  m_pArray = newArray;
2912  }
2913 
2914  m_Count = newCount;
2915  }
2916 
2917  void clear(bool freeMemory = false)
2918  {
2919  resize(0, freeMemory);
2920  }
2921 
2922  void insert(size_t index, const T& src)
2923  {
2924  VMA_HEAVY_ASSERT(index <= m_Count);
2925  const size_t oldCount = size();
2926  resize(oldCount + 1);
2927  if(index < oldCount)
2928  {
2929  memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
2930  }
2931  m_pArray[index] = src;
2932  }
2933 
2934  void remove(size_t index)
2935  {
2936  VMA_HEAVY_ASSERT(index < m_Count);
2937  const size_t oldCount = size();
2938  if(index < oldCount - 1)
2939  {
2940  memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
2941  }
2942  resize(oldCount - 1);
2943  }
2944 
2945  void push_back(const T& src)
2946  {
2947  const size_t newIndex = size();
2948  resize(newIndex + 1);
2949  m_pArray[newIndex] = src;
2950  }
2951 
2952  void pop_back()
2953  {
2954  VMA_HEAVY_ASSERT(m_Count > 0);
2955  resize(size() - 1);
2956  }
2957 
2958  void push_front(const T& src)
2959  {
2960  insert(0, src);
2961  }
2962 
2963  void pop_front()
2964  {
2965  VMA_HEAVY_ASSERT(m_Count > 0);
2966  remove(0);
2967  }
2968 
2969  typedef T* iterator;
2970 
2971  iterator begin() { return m_pArray; }
2972  iterator end() { return m_pArray + m_Count; }
2973 
2974 private:
2975  AllocatorT m_Allocator;
2976  T* m_pArray;
2977  size_t m_Count;
2978  size_t m_Capacity;
2979 };
2980 
// Counterpart of the std::vector overload above: forwards to VmaVector::insert.
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}
2986 
 // Free-function helper forwarding to VmaVector::remove.
2987 template<typename T, typename allocatorT>
2988 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
2989 {
2990  vec.remove(index);
2991 }
2992 
2993 #endif // #if VMA_USE_STL_VECTOR
2994 
// Inserts value into a vector kept sorted according to CmpLess and
// returns the index at which the new element was placed.
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    typename VectorT::value_type* const beg = vector.data();
    typename VectorT::value_type* const end = beg + vector.size();
    // Lower-bound position: first element not less than value.
    const size_t insertIndex = VmaBinaryFindFirstNotLess(beg, end, value, CmpLess()) - beg;
    VmaVectorInsert(vector, insertIndex, value);
    return insertIndex;
}
3006 
// Removes one element equivalent to value (neither orders before the
// other under CmpLess) from a sorted vector.
// Returns true if an element was found and removed, false otherwise.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess less;
    const typename VectorT::iterator pos = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        less);
    // Equivalence test: value is neither less nor greater than *pos.
    const bool found =
        (pos != vector.end()) && !less(*pos, value) && !less(value, *pos);
    if(found)
    {
        VmaVectorRemove(vector, pos - vector.begin());
    }
    return found;
}
3024 
/*
Searches a vector sorted according to CmpLess for an element equivalent to
value (neither compares less than the other). Returns the element's index,
or vector.size() if no such element exists.

Fixes over the previous version:
- `it != vector.size()` compared a pointer against an integer; the result
  must be compared against the end pointer `data() + size()`.
- `it - vector.begin()` called the non-const `begin()` on a const vector
  (and mixed data() with begin()); the offset is now taken from data().
*/
template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* const beg = vector.data();
    const typename VectorT::value_type* const end = beg + vector.size();
    const typename VectorT::value_type* const it = VmaBinaryFindFirstNotLess(
        beg,
        end,
        value,
        comparator);
    if(it != end && !comparator(*it, value) && !comparator(value, *it))
    {
        return size_t(it - beg);
    }
    return vector.size();
}
3043 
3045 // class VmaPoolAllocator
3046 
3047 /*
3048 Allocator for objects of type T using a list of arrays (pools) to speed up
3049 allocation. Number of elements that can be allocated is not bounded because
3050 allocator can create multiple blocks.
3051 */
3052 template<typename T>
3053 class VmaPoolAllocator
3054 {
3055 public:
 // pAllocationCallbacks: optional user CPU-allocation hooks.
 // itemsPerBlock: fixed capacity of each internal block; must be > 0.
3056  VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
3057  ~VmaPoolAllocator();
 // Frees all blocks. Pointers previously returned by Alloc() become invalid.
3058  void Clear();
 // Returns an uninitialized slot for a T; grows by whole blocks as needed.
3059  T* Alloc();
 // Returns a slot obtained from Alloc() to its owning block's free list.
3060  void Free(T* ptr);
3061 
3062 private:
 // A slot is either a live value or a link in its block's free list,
 // never both - hence the union.
3063  union Item
3064  {
3065  uint32_t NextFreeIndex;
3066  T Value;
3067  };
3068 
 // Contiguous array of m_ItemsPerBlock items plus the head index of its
 // free list (UINT32_MAX means the block is full).
3069  struct ItemBlock
3070  {
3071  Item* pItems;
3072  uint32_t FirstFreeIndex;
3073  };
3074 
3075  const VkAllocationCallbacks* m_pAllocationCallbacks;
3076  size_t m_ItemsPerBlock;
3077  VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3078 
 // Appends a new, fully-free block and returns a reference to it.
3079  ItemBlock& CreateNewBlock();
3080 };
3081 
 // Stores the callbacks and per-block capacity. No block is allocated
 // yet; blocks are created lazily on the first Alloc().
3082 template<typename T>
3083 VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
3084  m_pAllocationCallbacks(pAllocationCallbacks),
3085  m_ItemsPerBlock(itemsPerBlock),
3086  m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3087 {
3088  VMA_ASSERT(itemsPerBlock > 0);
3089 }
3090 
 // Releases all blocks still owned by this allocator.
3091 template<typename T>
3092 VmaPoolAllocator<T>::~VmaPoolAllocator()
3093 {
3094  Clear();
3095 }
3096 
3097 template<typename T>
3098 void VmaPoolAllocator<T>::Clear()
3099 {
3100  for(size_t i = m_ItemBlocks.size(); i--; )
3101  vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3102  m_ItemBlocks.clear();
3103 }
3104 
3105 template<typename T>
3106 T* VmaPoolAllocator<T>::Alloc()
3107 {
3108  for(size_t i = m_ItemBlocks.size(); i--; )
3109  {
3110  ItemBlock& block = m_ItemBlocks[i];
3111  // This block has some free items: Use first one.
3112  if(block.FirstFreeIndex != UINT32_MAX)
3113  {
3114  Item* const pItem = &block.pItems[block.FirstFreeIndex];
3115  block.FirstFreeIndex = pItem->NextFreeIndex;
3116  return &pItem->Value;
3117  }
3118  }
3119 
3120  // No block has free item: Create new one and use it.
3121  ItemBlock& newBlock = CreateNewBlock();
3122  Item* const pItem = &newBlock.pItems[0];
3123  newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3124  return &pItem->Value;
3125 }
3126 
3127 template<typename T>
3128 void VmaPoolAllocator<T>::Free(T* ptr)
3129 {
3130  // Search all memory blocks to find ptr.
3131  for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
3132  {
3133  ItemBlock& block = m_ItemBlocks[i];
3134 
3135  // Casting to union.
3136  Item* pItemPtr;
3137  memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));
3138 
3139  // Check if pItemPtr is in address range of this block.
3140  if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3141  {
3142  const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
3143  pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3144  block.FirstFreeIndex = index;
3145  return;
3146  }
3147  }
3148  VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
3149 }
3150 
3151 template<typename T>
3152 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3153 {
3154  ItemBlock newBlock = {
3155  vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3156 
3157  m_ItemBlocks.push_back(newBlock);
3158 
3159  // Setup singly-linked list of all free items in this block.
3160  for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3161  newBlock.pItems[i].NextFreeIndex = i + 1;
3162  newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3163  return m_ItemBlocks.back();
3164 }
3165 
3167 // class VmaRawList, VmaList
3168 
3169 #if VMA_USE_STL_LIST
3170 
3171 #define VmaList std::list
3172 
3173 #else // #if VMA_USE_STL_LIST
3174 
 // Node of VmaRawList: intrusive prev/next links plus an inline payload.
3175 template<typename T>
3176 struct VmaListItem
3177 {
3178  VmaListItem* pPrev;
3179  VmaListItem* pNext;
3180  T Value;
3181 };
3182 
3183 // Doubly linked list.
 // Low-level list of VmaListItem nodes allocated from an internal
 // VmaPoolAllocator. No iterators; VmaList below wraps this with an
 // STL-like interface.
3184 template<typename T>
3185 class VmaRawList
3186 {
3187 public:
3188  typedef VmaListItem<T> ItemType;
3189 
3190  VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
3191  ~VmaRawList();
 // Frees all items and resets the list to empty.
3192  void Clear();
3193 
3194  size_t GetCount() const { return m_Count; }
3195  bool IsEmpty() const { return m_Count == 0; }
3196 
3197  ItemType* Front() { return m_pFront; }
3198  const ItemType* Front() const { return m_pFront; }
3199  ItemType* Back() { return m_pBack; }
3200  const ItemType* Back() const { return m_pBack; }
3201 
 // Parameterless Push* return a new node with Value uninitialized.
3202  ItemType* PushBack();
3203  ItemType* PushFront();
3204  ItemType* PushBack(const T& value);
3205  ItemType* PushFront(const T& value);
3206  void PopBack();
3207  void PopFront();
3208 
3209  // Item can be null - it means PushBack.
3210  ItemType* InsertBefore(ItemType* pItem);
3211  // Item can be null - it means PushFront.
3212  ItemType* InsertAfter(ItemType* pItem);
3213 
3214  ItemType* InsertBefore(ItemType* pItem, const T& value);
3215  ItemType* InsertAfter(ItemType* pItem, const T& value);
3216 
 // Unlinks pItem and returns it to the item allocator.
3217  void Remove(ItemType* pItem);
3218 
3219 private:
3220  const VkAllocationCallbacks* const m_pAllocationCallbacks;
3221  VmaPoolAllocator<ItemType> m_ItemAllocator;
3222  ItemType* m_pFront;
3223  ItemType* m_pBack;
3224  size_t m_Count;
3225 
3226  // Declared not defined, to block copy constructor and assignment operator.
3227  VmaRawList(const VmaRawList<T>& src);
3228  VmaRawList<T>& operator=(const VmaRawList<T>& rhs);
3229 };
3230 
 // Initializes an empty list. Nodes come from an internal pool allocator
 // configured with 128 items per block.
3231 template<typename T>
3232 VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
3233  m_pAllocationCallbacks(pAllocationCallbacks),
3234  m_ItemAllocator(pAllocationCallbacks, 128),
3235  m_pFront(VMA_NULL),
3236  m_pBack(VMA_NULL),
3237  m_Count(0)
3238 {
3239 }
3240 
 // Destructor relies on m_ItemAllocator's destruction to release memory.
3241 template<typename T>
3242 VmaRawList<T>::~VmaRawList()
3243 {
3244  // Intentionally not calling Clear, because that would be unnecessary
3245  // computations to return all items to m_ItemAllocator as free.
3246 }
3247 
3248 template<typename T>
3249 void VmaRawList<T>::Clear()
3250 {
3251  if(IsEmpty() == false)
3252  {
3253  ItemType* pItem = m_pBack;
3254  while(pItem != VMA_NULL)
3255  {
3256  ItemType* const pPrevItem = pItem->pPrev;
3257  m_ItemAllocator.Free(pItem);
3258  pItem = pPrevItem;
3259  }
3260  m_pFront = VMA_NULL;
3261  m_pBack = VMA_NULL;
3262  m_Count = 0;
3263  }
3264 }
3265 
3266 template<typename T>
3267 VmaListItem<T>* VmaRawList<T>::PushBack()
3268 {
3269  ItemType* const pNewItem = m_ItemAllocator.Alloc();
3270  pNewItem->pNext = VMA_NULL;
3271  if(IsEmpty())
3272  {
3273  pNewItem->pPrev = VMA_NULL;
3274  m_pFront = pNewItem;
3275  m_pBack = pNewItem;
3276  m_Count = 1;
3277  }
3278  else
3279  {
3280  pNewItem->pPrev = m_pBack;
3281  m_pBack->pNext = pNewItem;
3282  m_pBack = pNewItem;
3283  ++m_Count;
3284  }
3285  return pNewItem;
3286 }
3287 
3288 template<typename T>
3289 VmaListItem<T>* VmaRawList<T>::PushFront()
3290 {
3291  ItemType* const pNewItem = m_ItemAllocator.Alloc();
3292  pNewItem->pPrev = VMA_NULL;
3293  if(IsEmpty())
3294  {
3295  pNewItem->pNext = VMA_NULL;
3296  m_pFront = pNewItem;
3297  m_pBack = pNewItem;
3298  m_Count = 1;
3299  }
3300  else
3301  {
3302  pNewItem->pNext = m_pFront;
3303  m_pFront->pPrev = pNewItem;
3304  m_pFront = pNewItem;
3305  ++m_Count;
3306  }
3307  return pNewItem;
3308 }
3309 
 // Appends a new node initialized to a copy of value; returns the node.
3310 template<typename T>
3311 VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
3312 {
3313  ItemType* const pNewItem = PushBack();
3314  pNewItem->Value = value;
3315  return pNewItem;
3316 }
3317 
 // Prepends a new node initialized to a copy of value; returns the node.
3318 template<typename T>
3319 VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
3320 {
3321  ItemType* const pNewItem = PushFront();
3322  pNewItem->Value = value;
3323  return pNewItem;
3324 }
3325 
3326 template<typename T>
3327 void VmaRawList<T>::PopBack()
3328 {
3329  VMA_HEAVY_ASSERT(m_Count > 0);
3330  ItemType* const pBackItem = m_pBack;
3331  ItemType* const pPrevItem = pBackItem->pPrev;
3332  if(pPrevItem != VMA_NULL)
3333  {
3334  pPrevItem->pNext = VMA_NULL;
3335  }
3336  m_pBack = pPrevItem;
3337  m_ItemAllocator.Free(pBackItem);
3338  --m_Count;
3339 }
3340 
3341 template<typename T>
3342 void VmaRawList<T>::PopFront()
3343 {
3344  VMA_HEAVY_ASSERT(m_Count > 0);
3345  ItemType* const pFrontItem = m_pFront;
3346  ItemType* const pNextItem = pFrontItem->pNext;
3347  if(pNextItem != VMA_NULL)
3348  {
3349  pNextItem->pPrev = VMA_NULL;
3350  }
3351  m_pFront = pNextItem;
3352  m_ItemAllocator.Free(pFrontItem);
3353  --m_Count;
3354 }
3355 
3356 template<typename T>
3357 void VmaRawList<T>::Remove(ItemType* pItem)
3358 {
3359  VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3360  VMA_HEAVY_ASSERT(m_Count > 0);
3361 
3362  if(pItem->pPrev != VMA_NULL)
3363  {
3364  pItem->pPrev->pNext = pItem->pNext;
3365  }
3366  else
3367  {
3368  VMA_HEAVY_ASSERT(m_pFront == pItem);
3369  m_pFront = pItem->pNext;
3370  }
3371 
3372  if(pItem->pNext != VMA_NULL)
3373  {
3374  pItem->pNext->pPrev = pItem->pPrev;
3375  }
3376  else
3377  {
3378  VMA_HEAVY_ASSERT(m_pBack == pItem);
3379  m_pBack = pItem->pPrev;
3380  }
3381 
3382  m_ItemAllocator.Free(pItem);
3383  --m_Count;
3384 }
3385 
3386 template<typename T>
3387 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3388 {
3389  if(pItem != VMA_NULL)
3390  {
3391  ItemType* const prevItem = pItem->pPrev;
3392  ItemType* const newItem = m_ItemAllocator.Alloc();
3393  newItem->pPrev = prevItem;
3394  newItem->pNext = pItem;
3395  pItem->pPrev = newItem;
3396  if(prevItem != VMA_NULL)
3397  {
3398  prevItem->pNext = newItem;
3399  }
3400  else
3401  {
3402  VMA_HEAVY_ASSERT(m_pFront == pItem);
3403  m_pFront = newItem;
3404  }
3405  ++m_Count;
3406  return newItem;
3407  }
3408  else
3409  return PushBack();
3410 }
3411 
3412 template<typename T>
3413 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3414 {
3415  if(pItem != VMA_NULL)
3416  {
3417  ItemType* const nextItem = pItem->pNext;
3418  ItemType* const newItem = m_ItemAllocator.Alloc();
3419  newItem->pNext = nextItem;
3420  newItem->pPrev = pItem;
3421  pItem->pNext = newItem;
3422  if(nextItem != VMA_NULL)
3423  {
3424  nextItem->pPrev = newItem;
3425  }
3426  else
3427  {
3428  VMA_HEAVY_ASSERT(m_pBack == pItem);
3429  m_pBack = newItem;
3430  }
3431  ++m_Count;
3432  return newItem;
3433  }
3434  else
3435  return PushFront();
3436 }
3437 
 // Inserts a copy of value before pItem (null pItem appends); returns node.
3438 template<typename T>
3439 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
3440 {
3441  ItemType* const newItem = InsertBefore(pItem);
3442  newItem->Value = value;
3443  return newItem;
3444 }
3445 
 // Inserts a copy of value after pItem (null pItem prepends); returns node.
3446 template<typename T>
3447 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
3448 {
3449  ItemType* const newItem = InsertAfter(pItem);
3450  newItem->Value = value;
3451  return newItem;
3452 }
3453 
 // Subset of the std::list interface built on VmaRawList (used when
 // VMA_USE_STL_LIST is 0). AllocatorT must expose m_pCallbacks.
3454 template<typename T, typename AllocatorT>
3455 class VmaList
3456 {
3457 public:
 // Mutable iterator. A null m_pItem represents end().
3458  class iterator
3459  {
3460  public:
3461  iterator() :
3462  m_pList(VMA_NULL),
3463  m_pItem(VMA_NULL)
3464  {
3465  }
3466 
3467  T& operator*() const
3468  {
3469  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3470  return m_pItem->Value;
3471  }
3472  T* operator->() const
3473  {
3474  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3475  return &m_pItem->Value;
3476  }
3477 
3478  iterator& operator++()
3479  {
3480  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3481  m_pItem = m_pItem->pNext;
3482  return *this;
3483  }
 // Decrementing end() (null item) yields the last element via Back().
3484  iterator& operator--()
3485  {
3486  if(m_pItem != VMA_NULL)
3487  {
3488  m_pItem = m_pItem->pPrev;
3489  }
3490  else
3491  {
3492  VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3493  m_pItem = m_pList->Back();
3494  }
3495  return *this;
3496  }
3497 
3498  iterator operator++(int)
3499  {
3500  iterator result = *this;
3501  ++*this;
3502  return result;
3503  }
3504  iterator operator--(int)
3505  {
3506  iterator result = *this;
3507  --*this;
3508  return result;
3509  }
3510 
3511  bool operator==(const iterator& rhs) const
3512  {
3513  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3514  return m_pItem == rhs.m_pItem;
3515  }
3516  bool operator!=(const iterator& rhs) const
3517  {
3518  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3519  return m_pItem != rhs.m_pItem;
3520  }
3521 
3522  private:
3523  VmaRawList<T>* m_pList;
3524  VmaListItem<T>* m_pItem;
3525 
 // Only VmaList itself may create positioned iterators.
3526  iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3527  m_pList(pList),
3528  m_pItem(pItem)
3529  {
3530  }
3531 
3532  friend class VmaList<T, AllocatorT>;
3533  };
3534 
 // Read-only iterator; implicitly convertible from iterator.
3535  class const_iterator
3536  {
3537  public:
3538  const_iterator() :
3539  m_pList(VMA_NULL),
3540  m_pItem(VMA_NULL)
3541  {
3542  }
3543 
3544  const_iterator(const iterator& src) :
3545  m_pList(src.m_pList),
3546  m_pItem(src.m_pItem)
3547  {
3548  }
3549 
3550  const T& operator*() const
3551  {
3552  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3553  return m_pItem->Value;
3554  }
3555  const T* operator->() const
3556  {
3557  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3558  return &m_pItem->Value;
3559  }
3560 
3561  const_iterator& operator++()
3562  {
3563  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3564  m_pItem = m_pItem->pNext;
3565  return *this;
3566  }
 // Decrementing cend() (null item) yields the last element via Back().
3567  const_iterator& operator--()
3568  {
3569  if(m_pItem != VMA_NULL)
3570  {
3571  m_pItem = m_pItem->pPrev;
3572  }
3573  else
3574  {
3575  VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3576  m_pItem = m_pList->Back();
3577  }
3578  return *this;
3579  }
3580 
3581  const_iterator operator++(int)
3582  {
3583  const_iterator result = *this;
3584  ++*this;
3585  return result;
3586  }
3587  const_iterator operator--(int)
3588  {
3589  const_iterator result = *this;
3590  --*this;
3591  return result;
3592  }
3593 
3594  bool operator==(const const_iterator& rhs) const
3595  {
3596  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3597  return m_pItem == rhs.m_pItem;
3598  }
3599  bool operator!=(const const_iterator& rhs) const
3600  {
3601  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3602  return m_pItem != rhs.m_pItem;
3603  }
3604 
3605  private:
3606  const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
3607  m_pList(pList),
3608  m_pItem(pItem)
3609  {
3610  }
3611 
3612  const VmaRawList<T>* m_pList;
3613  const VmaListItem<T>* m_pItem;
3614 
3615  friend class VmaList<T, AllocatorT>;
3616  };
3617 
 // The allocator argument is only used for its m_pCallbacks member.
3618  VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3619 
3620  bool empty() const { return m_RawList.IsEmpty(); }
3621  size_t size() const { return m_RawList.GetCount(); }
3622 
3623  iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
 // end() is represented by a null item pointer.
3624  iterator end() { return iterator(&m_RawList, VMA_NULL); }
3625 
3626  const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
3627  const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }
3628 
3629  void clear() { m_RawList.Clear(); }
3630  void push_back(const T& value) { m_RawList.PushBack(value); }
3631  void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
 // Inserts value before it; insert(end(), v) appends.
3632  iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3633 
3634 private:
3635  VmaRawList<T> m_RawList;
3636 };
3637 
3638 #endif // #if VMA_USE_STL_LIST
3639 
3641 // class VmaMap
3642 
3643 // Unused in this version.
3644 #if 0
3645 
3646 #if VMA_USE_STL_UNORDERED_MAP
3647 
3648 #define VmaPair std::pair
3649 
3650 #define VMA_MAP_TYPE(KeyT, ValueT) \
3651  std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
3652 
3653 #else // #if VMA_USE_STL_UNORDERED_MAP
3654 
 // Minimal std::pair substitute for the non-STL VmaMap below.
 // (Entire VmaMap section is currently compiled out via #if 0.)
3655 template<typename T1, typename T2>
3656 struct VmaPair
3657 {
3658  T1 first;
3659  T2 second;
3660 
3661  VmaPair() : first(), second() { }
3662  VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3663 };
3664 
3665 /* Class compatible with subset of interface of std::unordered_map.
3666 KeyT, ValueT must be POD because they will be stored in VmaVector.
3667 */
 // Backed by a VmaVector of pairs kept sorted by key (see insert below),
 // so find() is a binary search rather than a hash lookup.
3668 template<typename KeyT, typename ValueT>
3669 class VmaMap
3670 {
3671 public:
3672  typedef VmaPair<KeyT, ValueT> PairType;
3673  typedef PairType* iterator;
3674 
3675  VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3676 
3677  iterator begin() { return m_Vector.begin(); }
3678  iterator end() { return m_Vector.end(); }
3679 
3680  void insert(const PairType& pair);
3681  iterator find(const KeyT& key);
3682  void erase(iterator it);
3683 
3684 private:
3685  VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3686 };
3687 
3688 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
3689 
 // Orders pairs (or a pair against a bare key) by the first member only.
3690 template<typename FirstT, typename SecondT>
3691 struct VmaPairFirstLess
3692 {
3693  bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
3694  {
3695  return lhs.first < rhs.first;
3696  }
3697  bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
3698  {
3699  return lhs.first < rhsFirst;
3700  }
3701 };
3702 
 // Inserts pair at its lower-bound position, keeping m_Vector sorted by key.
3703 template<typename KeyT, typename ValueT>
3704 void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
3705 {
3706  const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3707  m_Vector.data(),
3708  m_Vector.data() + m_Vector.size(),
3709  pair,
3710  VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3711  VmaVectorInsert(m_Vector, indexToInsert, pair);
3712 }
3713 
 // Binary-searches for key; returns end() when not present.
3714 template<typename KeyT, typename ValueT>
3715 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
3716 {
3717  PairType* it = VmaBinaryFindFirstNotLess(
3718  m_Vector.data(),
3719  m_Vector.data() + m_Vector.size(),
3720  key,
3721  VmaPairFirstLess<KeyT, ValueT>());
3722  if((it != m_Vector.end()) && (it->first == key))
3723  {
3724  return it;
3725  }
3726  else
3727  {
3728  return m_Vector.end();
3729  }
3730 }
3731 
 // Erases the entry at it (must be a valid iterator into this map).
3732 template<typename KeyT, typename ValueT>
3733 void VmaMap<KeyT, ValueT>::erase(iterator it)
3734 {
3735  VmaVectorRemove(m_Vector, it - m_Vector.begin());
3736 }
3737 
3738 #endif // #if VMA_USE_STL_UNORDERED_MAP
3739 
3740 #endif // #if 0
3741 
3743 
3744 class VmaDeviceMemoryBlock;
3745 
 // Internal state behind a VmaAllocation handle. An allocation is either a
 // sub-range of a VmaDeviceMemoryBlock or its own dedicated VkDeviceMemory,
 // selected by m_Type and the anonymous union at the bottom.
3746 struct VmaAllocation_T
3747 {
3748 private:
 // Bit of m_MapCount set when the allocation is persistently mapped.
3749  static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3750 
3751  enum FLAGS
3752  {
 // Set when m_pUserData is an owned string (see FreeUserDataString).
3753  FLAG_USER_DATA_STRING = 0x01,
3754  };
3755 
3756 public:
3757  enum ALLOCATION_TYPE
3758  {
3759  ALLOCATION_TYPE_NONE,
3760  ALLOCATION_TYPE_BLOCK,
3761  ALLOCATION_TYPE_DEDICATED,
3762  };
3763 
 // Starts as ALLOCATION_TYPE_NONE; exactly one Init* method must follow.
3764  VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
3765  m_Alignment(1),
3766  m_Size(0),
3767  m_pUserData(VMA_NULL),
3768  m_LastUseFrameIndex(currentFrameIndex),
3769  m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3770  m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3771  m_MapCount(0),
3772  m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3773  {
3774  }
3775 
3776  ~VmaAllocation_T()
3777  {
3778  VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");
3779 
3780  // Check if owned string was freed.
3781  VMA_ASSERT(m_pUserData == VMA_NULL);
3782  }
3783 
 // Initializes this allocation as a sub-range of a device memory block.
3784  void InitBlockAllocation(
3785  VmaPool hPool,
3786  VmaDeviceMemoryBlock* block,
3787  VkDeviceSize offset,
3788  VkDeviceSize alignment,
3789  VkDeviceSize size,
3790  VmaSuballocationType suballocationType,
3791  bool mapped,
3792  bool canBecomeLost)
3793  {
3794  VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3795  VMA_ASSERT(block != VMA_NULL);
3796  m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3797  m_Alignment = alignment;
3798  m_Size = size;
3799  m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3800  m_SuballocationType = (uint8_t)suballocationType;
3801  m_BlockAllocation.m_hPool = hPool;
3802  m_BlockAllocation.m_Block = block;
3803  m_BlockAllocation.m_Offset = offset;
3804  m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3805  }
3806 
 // Initializes this allocation as already lost: a block allocation with a
 // null block. Requires m_LastUseFrameIndex == VMA_FRAME_INDEX_LOST.
3807  void InitLost()
3808  {
3809  VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3810  VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3811  m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3812  m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3813  m_BlockAllocation.m_Block = VMA_NULL;
3814  m_BlockAllocation.m_Offset = 0;
3815  m_BlockAllocation.m_CanBecomeLost = true;
3816  }
3817 
 // Re-points an existing block allocation to another block/offset
 // (presumably used by defragmentation - defined out of line).
3818  void ChangeBlockAllocation(
3819  VmaAllocator hAllocator,
3820  VmaDeviceMemoryBlock* block,
3821  VkDeviceSize offset);
3822 
3823  // pMappedData not null means allocation is created with MAPPED flag.
3824  void InitDedicatedAllocation(
3825  uint32_t memoryTypeIndex,
3826  VkDeviceMemory hMemory,
3827  VmaSuballocationType suballocationType,
3828  void* pMappedData,
3829  VkDeviceSize size)
3830  {
3831  VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3832  VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3833  m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3834  m_Alignment = 0;
3835  m_Size = size;
3836  m_SuballocationType = (uint8_t)suballocationType;
3837  m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3838  m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3839  m_DedicatedAllocation.m_hMemory = hMemory;
3840  m_DedicatedAllocation.m_pMappedData = pMappedData;
3841  }
3842 
3843  ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
3844  VkDeviceSize GetAlignment() const { return m_Alignment; }
3845  VkDeviceSize GetSize() const { return m_Size; }
3846  bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3847  void* GetUserData() const { return m_pUserData; }
3848  void SetUserData(VmaAllocator hAllocator, void* pUserData);
3849  VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }
3850 
 // Valid only for ALLOCATION_TYPE_BLOCK.
3851  VmaDeviceMemoryBlock* GetBlock() const
3852  {
3853  VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3854  return m_BlockAllocation.m_Block;
3855  }
3856  VkDeviceSize GetOffset() const;
3857  VkDeviceMemory GetMemory() const;
3858  uint32_t GetMemoryTypeIndex() const;
3859  bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3860  void* GetMappedData() const;
3861  bool CanBecomeLost() const;
3862  VmaPool GetPool() const;
3863 
3864  uint32_t GetLastUseFrameIndex() const
3865  {
3866  return m_LastUseFrameIndex.load();
3867  }
3868  bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3869  {
3870  return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3871  }
3872  /*
3873  - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
3874  makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
3875  - Else, returns false.
3876 
3877  If hAllocation is already lost, assert - you should not call it then.
3878  If hAllocation was not created with CAN_BECOME_LOST_BIT, assert.
3879  */
3880  bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3881 
 // Fills outInfo as if this dedicated allocation were its own block:
 // one block, one allocation, no unused ranges.
3882  void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
3883  {
3884  VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3885  outInfo.blockCount = 1;
3886  outInfo.allocationCount = 1;
3887  outInfo.unusedRangeCount = 0;
3888  outInfo.usedBytes = m_Size;
3889  outInfo.unusedBytes = 0;
3890  outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
3891  outInfo.unusedRangeSizeMin = UINT64_MAX;
3892  outInfo.unusedRangeSizeMax = 0;
3893  }
3894 
3895  void BlockAllocMap();
3896  void BlockAllocUnmap();
3897  VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
3898  void DedicatedAllocUnmap(VmaAllocator hAllocator);
3899 
3900 private:
3901  VkDeviceSize m_Alignment;
3902  VkDeviceSize m_Size;
3903  void* m_pUserData;
3904  VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3905  uint8_t m_Type; // ALLOCATION_TYPE
3906  uint8_t m_SuballocationType; // VmaSuballocationType
3907  // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
3908  // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
3909  uint8_t m_MapCount;
3910  uint8_t m_Flags; // enum FLAGS
3911 
3912  // Allocation out of VmaDeviceMemoryBlock.
3913  struct BlockAllocation
3914  {
3915  VmaPool m_hPool; // Null if belongs to general memory.
3916  VmaDeviceMemoryBlock* m_Block;
3917  VkDeviceSize m_Offset;
3918  bool m_CanBecomeLost;
3919  };
3920 
3921  // Allocation for an object that has its own private VkDeviceMemory.
3922  struct DedicatedAllocation
3923  {
3924  uint32_t m_MemoryTypeIndex;
3925  VkDeviceMemory m_hMemory;
3926  void* m_pMappedData; // Not null means memory is mapped.
3927  };
3928 
 // Which member is active is determined by m_Type.
3929  union
3930  {
3931  // Allocation out of VmaDeviceMemoryBlock.
3932  BlockAllocation m_BlockAllocation;
3933  // Allocation for an object that has its own private VkDeviceMemory.
3934  DedicatedAllocation m_DedicatedAllocation;
3935  };
3936 
3937  void FreeUserDataString(VmaAllocator hAllocator);
3938 };
3939 
3940 /*
3941 Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as
3942 allocated memory block or free.
3943 */
3944 struct VmaSuballocation
3945 {
3946  VkDeviceSize offset;
3947  VkDeviceSize size;
 // Null for a free range; otherwise the allocation occupying this range.
3948  VmaAllocation hAllocation;
3949  VmaSuballocationType type;
3950 };
3951 
 // Suballocations of a block, kept in offset order.
3952 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
3953 
3954 // Cost of one additional allocation lost, as equivalent in bytes.
3955 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3956 
3957 /*
3958 Parameters of planned allocation inside a VmaDeviceMemoryBlock.
3959 
3960 If canMakeOtherLost was false:
3961 - item points to a FREE suballocation.
3962 - itemsToMakeLostCount is 0.
3963 
3964 If canMakeOtherLost was true:
3965 - item points to first of sequence of suballocations, which are either FREE,
3966  or point to VmaAllocations that can become lost.
3967 - itemsToMakeLostCount is the number of VmaAllocations that need to be made lost for
3968  the requested allocation to succeed.
3969 */
3970 struct VmaAllocationRequest
3971 {
3972  VkDeviceSize offset;
3973  VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
3974  VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
3975  VmaSuballocationList::iterator item;
3976  size_t itemsToMakeLostCount;
3977 
 // Total cost of this request: bytes sacrificed plus a fixed penalty
 // (VMA_LOST_ALLOCATION_COST) per allocation that would be lost.
3978  VkDeviceSize CalcCost() const
3979  {
3980  return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3981  }
3982 };
3983 
3984 /*
3985 Data structure used for bookkeeping of allocations and unused ranges of memory
3986 in a single VkDeviceMemory block.
3987 */
3988 class VmaBlockMetadata
3989 {
3990 public:
3991  VmaBlockMetadata(VmaAllocator hAllocator);
3992  ~VmaBlockMetadata();
 // Must be called once, with the size of the underlying VkDeviceMemory.
3993  void Init(VkDeviceSize size);
3994 
3995  // Validates all data structures inside this object. If not valid, returns false.
3996  bool Validate() const;
3997  VkDeviceSize GetSize() const { return m_Size; }
 // Live allocations = total suballocations minus the free ones.
3998  size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
3999  VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
4000  VkDeviceSize GetUnusedRangeSizeMax() const;
4001  // Returns true if this block is empty - contains only single free suballocation.
4002  bool IsEmpty() const;
4003 
4004  void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
4005  void AddPoolStats(VmaPoolStats& inoutStats) const;
4006 
4007 #if VMA_STATS_STRING_ENABLED
4008  void PrintDetailedMap(class VmaJsonWriter& json) const;
4009 #endif
4010 
4011  // Creates trivial request for case when block is empty.
4012  void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
4013 
4014  // Tries to find a place for suballocation with given parameters inside this block.
4015  // If succeeded, fills pAllocationRequest and returns true.
4016  // If failed, returns false.
4017  bool CreateAllocationRequest(
4018  uint32_t currentFrameIndex,
4019  uint32_t frameInUseCount,
4020  VkDeviceSize bufferImageGranularity,
4021  VkDeviceSize allocSize,
4022  VkDeviceSize allocAlignment,
4023  VmaSuballocationType allocType,
4024  bool canMakeOtherLost,
4025  VmaAllocationRequest* pAllocationRequest);
4026 
 // Marks the allocations named in pAllocationRequest as lost; returns
 // false if any of them could not be made lost.
4027  bool MakeRequestedAllocationsLost(
4028  uint32_t currentFrameIndex,
4029  uint32_t frameInUseCount,
4030  VmaAllocationRequest* pAllocationRequest);
4031 
4032  uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4033 
4034  // Makes actual allocation based on request. Request must already be checked and valid.
4035  void Alloc(
4036  const VmaAllocationRequest& request,
4037  VmaSuballocationType type,
4038  VkDeviceSize allocSize,
4039  VmaAllocation hAllocation);
4040 
4041  // Frees suballocation assigned to given memory region.
4042  void Free(const VmaAllocation allocation);
4043  void FreeAtOffset(VkDeviceSize offset);
4044 
4045 private:
4046  VkDeviceSize m_Size;
4047  uint32_t m_FreeCount;
4048  VkDeviceSize m_SumFreeSize;
 // All suballocations (free and taken), ordered by offset.
4049  VmaSuballocationList m_Suballocations;
4050  // Suballocations that are free and have size greater than certain threshold.
4051  // Sorted by size, ascending.
4052  VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4053 
4054  bool ValidateFreeSuballocationList() const;
4055 
4056  // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
4057  // If yes, fills pOffset and returns true. If no, returns false.
4058  bool CheckAllocation(
4059  uint32_t currentFrameIndex,
4060  uint32_t frameInUseCount,
4061  VkDeviceSize bufferImageGranularity,
4062  VkDeviceSize allocSize,
4063  VkDeviceSize allocAlignment,
4064  VmaSuballocationType allocType,
4065  VmaSuballocationList::const_iterator suballocItem,
4066  bool canMakeOtherLost,
4067  VkDeviceSize* pOffset,
4068  size_t* itemsToMakeLostCount,
4069  VkDeviceSize* pSumFreeSize,
4070  VkDeviceSize* pSumItemSize) const;
4071  // Given free suballocation, it merges it with following one, which must also be free.
4072  void MergeFreeWithNext(VmaSuballocationList::iterator item);
4073  // Releases given suballocation, making it free.
4074  // Merges it with adjacent free suballocations if applicable.
4075  // Returns iterator to new free suballocation at this place.
4076  VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4077  // Given free suballocation, it inserts it into sorted list of
4078  // m_FreeSuballocationsBySize if it's suitable.
4079  void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4080  // Given free suballocation, it removes it from sorted list of
4081  // m_FreeSuballocationsBySize if it's suitable.
4082  void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
4083 };
4084 
4085 /*
4086 Represents a single block of device memory (`VkDeviceMemory`) with all the
4087 data about its regions (aka suballocations, #VmaAllocation), assigned and free.
4088 
4089 Thread-safety: This class must be externally synchronized.
4090 */
class VmaDeviceMemoryBlock
{
public:
    // Bookkeeping of suballocations inside this block. Public so owning code
    // (block vectors, defragmentator) can query and modify it directly.
    VmaBlockMetadata m_Metadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        // Destruction with a live mapping or without a prior Destroy() call
        // indicates a usage bug; both asserts guard that protocol.
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    // ppData can be null.
    // `count` is a mapping reference count added/removed per call — the block
    // stays mapped while the count is non-zero (see m_MapCount).
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    // Bind helpers forward to vkBind*Memory under m_Mutex (implementation
    // defined elsewhere in this file).
    VkResult BindBufferMemory(
        const VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkBuffer hBuffer);
    VkResult BindImageMemory(
        const VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkImage hImage);

private:
    uint32_t m_MemoryTypeIndex;
    VkDeviceMemory m_hMemory;

    // Protects access to m_hMemory so it's not used by multiple threads simultaneously, e.g. vkMapMemory, vkBindBufferMemory.
    // Also protects m_MapCount, m_pMappedData.
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
4142 
// Strict weak ordering of raw pointers, used as comparator for containers
// sorted by address (e.g. dedicated-allocation vectors, pool list).
// NOTE: relational '<' on pointers to unrelated objects is unspecified in
// C++; comparing their integer representations yields a consistent total
// order on all supported platforms (same guarantee std::less provides).
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return reinterpret_cast<uintptr_t>(lhs) < reinterpret_cast<uintptr_t>(rhs);
    }
};
4150 
4151 class VmaDefragmentator;
4152 
4153 /*
4154 Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
4155 Vulkan memory type.
4156 
4157 Synchronized internally with a mutex.
4158 */
struct VmaBlockVector
{
    // hAllocator is retained (not owned); all other parameters are fixed for
    // the lifetime of the vector (see the const members below).
    VmaBlockVector(
        VmaAllocator hAllocator,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool isCustomPool);
    ~VmaBlockVector();

    // Pre-creates m_MinBlockCount empty blocks so a custom pool can guarantee
    // its minimum capacity up front.
    VkResult CreateMinBlocks();

    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }

    void GetPoolStats(VmaPoolStats* pStats);

    bool IsEmpty() const { return m_Blocks.empty(); }

    // Tries to suballocate from an existing block, possibly creating a new
    // block up to m_MaxBlockCount (implementation elsewhere in this file).
    VkResult Allocate(
        VmaPool hCurrentPool,
        uint32_t currentFrameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    void Free(
        VmaAllocation hAllocation);

    // Adds statistics of this BlockVector to pStats.
    void AddStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);

    // Lazily creates m_pDefragmentator; subsequent calls return the same one.
    VmaDefragmentator* EnsureDefragmentator(
        VmaAllocator hAllocator,
        uint32_t currentFrameIndex);

    // maxBytesToMove/maxAllocationsToMove are in-out budgets consumed by the
    // defragmentation pass.
    VkResult Defragment(
        VmaDefragmentationStats* pDefragmentationStats,
        VkDeviceSize& maxBytesToMove,
        uint32_t& maxAllocationsToMove);

    void DestroyDefragmentator();

private:
    friend class VmaDefragmentator;

    const VmaAllocator m_hAllocator;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_IsCustomPool;
    // Synchronizes all mutating operations ("synchronized internally" per the
    // struct-level comment above).
    VMA_MUTEX m_Mutex;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    /* There can be at most one allocation that is completely empty - a
    hysteresis to avoid pessimistic case of alternating creation and destruction
    of a VkDeviceMemory. */
    bool m_HasEmptyBlock;
    VmaDefragmentator* m_pDefragmentator;

    size_t CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
};
4247 
// Implementation of the opaque VmaPool handle: a thin wrapper that owns one
// VmaBlockVector configured from VmaPoolCreateInfo.
struct VmaPool_T
{
public:
    VmaBlockVector m_BlockVector;

    // Takes ownership.
    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo);
    ~VmaPool_T();

    VmaBlockVector& GetBlockVector() { return m_BlockVector; }

#if VMA_STATS_STRING_ENABLED
    //void PrintDetailedMap(class VmaStringBuilder& sb);
#endif
};
4265 
// Compacts allocations within a single VmaBlockVector: moves movable
// allocations into the fullest blocks so emptier blocks can be released.
// Usage: AddAllocation() for each candidate, then Defragment().
class VmaDefragmentator
{
    const VmaAllocator m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    uint32_t m_CurrentFrameIndex;
    // Running totals reported via GetBytesMoved()/GetAllocationsMoved().
    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        // Optional out-flag supplied by the user; set when the allocation is
        // actually moved.
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
    };

    // Orders allocations largest-first (used to pack big allocations first).
    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    // Used between AddAllocation and Defragment.
    VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

    struct BlockInfo
    {
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_pBlock(VMA_NULL),
            // Pessimistic default until CalcHasNonMovableAllocations() runs.
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks),
            m_pMappedDataForDefragmentation(VMA_NULL)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            // A block has non-movable allocations iff it holds more
            // allocations than were registered for defragmentation.
            const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        // NOTE: "Descecnding" is a long-standing typo in the identifier; kept
        // because callers elsewhere in this file use this exact name.
        void SortAllocationsBySizeDescecnding()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        VkResult EnsureMapping(VmaAllocator hAllocator, void** ppMappedData);
        void Unmap(VmaAllocator hAllocator);

    private:
        // Not null if mapped for defragmentation only, not originally mapped.
        void* m_pMappedDataForDefragmentation;
    };

    // Heterogeneous comparators so a BlockInfo vector can be binary-searched
    // by raw block pointer as well as sorted among BlockInfo pointers.
    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    // One pass of moving allocations; Defragment() may run several rounds
    // until the budgets are exhausted.
    VkResult DefragmentRound(
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);

public:
    VmaDefragmentator(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex);

    ~VmaDefragmentator();

    VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

    // Registers an allocation as movable. pChanged may be null.
    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);

    VkResult Defragment(
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);
};
4393 
4394 // Main allocator object.
// Main allocator object.
// Implementation of the opaque VmaAllocator handle: owns the default block
// vectors (one per memory type), dedicated-allocation registries, custom
// pools, and the imported Vulkan function pointers.
struct VmaAllocator_T
{
    bool m_UseMutex;
    bool m_UseKhrDedicatedAllocation;
    VkDevice m_hDevice;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;

    // Number of bytes free out of limit, or VK_WHOLE_SIZE if not limit for that heap.
    VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
    VMA_MUTEX m_HeapSizeLimitMutex;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    // Per-memory-type mutex guarding the corresponding vector above.
    VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    ~VmaAllocator_T();

    // Returns null when the user did not supply callbacks, so Vulkan uses its
    // default CPU allocator.
    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    // Device granularity, but never below the debug-configurable minimum.
    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }

    // Query memory requirements, including dedicated-allocation preference
    // when VK_KHR_dedicated_allocation is enabled.
    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // Main deallocation function.
    void FreeMemory(const VmaAllocation allocation);

    void CalculateStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult Defragment(
        VmaAllocation* pAllocations,
        size_t allocationCount,
        VkBool32* pAllocationsChanged,
        const VmaDefragmentationInfo* pDefragmentationInfo,
        VmaDefragmentationStats* pDefragmentationStats);

    void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
    bool TouchAllocation(VmaAllocation hAllocation);

    VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
    void DestroyPool(VmaPool pool);
    void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);

    void SetCurrentFrameIndex(uint32_t frameIndex);

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);

    void CreateLostAllocation(VmaAllocation* pAllocation);

    // Low-level vkAllocateMemory/vkFreeMemory wrappers that also maintain
    // m_HeapSizeLimit and invoke user device-memory callbacks.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult Map(VmaAllocation hAllocation, void** ppData);
    void Unmap(VmaAllocation hAllocation);

    VkResult BindBufferMemory(VmaAllocation hAllocation, VkBuffer hBuffer);
    VkResult BindImageMemory(VmaAllocation hAllocation, VkImage hImage);

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;

    VMA_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;

    VmaVulkanFunctions m_VulkanFunctions;

    void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        const VkMemoryRequirements& vkMemReq,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for single allocation.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        VmaAllocation* pAllocation);

    // Tries to free pMemory as Dedicated Memory. Returns true if found and freed.
    void FreeDedicatedMemory(VmaAllocation allocation);
};
4550 
4552 // Memory allocation #2 after VmaAllocator_T definition
4553 
4554 static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
4555 {
4556  return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4557 }
4558 
4559 static void VmaFree(VmaAllocator hAllocator, void* ptr)
4560 {
4561  VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
4562 }
4563 
4564 template<typename T>
4565 static T* VmaAllocate(VmaAllocator hAllocator)
4566 {
4567  return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
4568 }
4569 
4570 template<typename T>
4571 static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
4572 {
4573  return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
4574 }
4575 
4576 template<typename T>
4577 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4578 {
4579  if(ptr != VMA_NULL)
4580  {
4581  ptr->~T();
4582  VmaFree(hAllocator, ptr);
4583  }
4584 }
4585 
4586 template<typename T>
4587 static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
4588 {
4589  if(ptr != VMA_NULL)
4590  {
4591  for(size_t i = count; i--; )
4592  ptr[i].~T();
4593  VmaFree(hAllocator, ptr);
4594  }
4595 }
4596 
4598 // VmaStringBuilder
4599 
4600 #if VMA_STATS_STRING_ENABLED
4601 
// Minimal append-only character buffer used to build the statistics string.
// NOTE: GetData() is NOT null-terminated — GetLength() gives the valid size.
class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    // Appends the decimal representation of the number.
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    // Appends the textual form of the pointer (see VmaPtrToStr).
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};
4619 
4620 void VmaStringBuilder::Add(const char* pStr)
4621 {
4622  const size_t strLen = strlen(pStr);
4623  if(strLen > 0)
4624  {
4625  const size_t oldCount = m_Data.size();
4626  m_Data.resize(oldCount + strLen);
4627  memcpy(m_Data.data() + oldCount, pStr, strLen);
4628  }
4629 }
4630 
4631 void VmaStringBuilder::AddNumber(uint32_t num)
4632 {
4633  char buf[11];
4634  VmaUint32ToStr(buf, sizeof(buf), num);
4635  Add(buf);
4636 }
4637 
4638 void VmaStringBuilder::AddNumber(uint64_t num)
4639 {
4640  char buf[21];
4641  VmaUint64ToStr(buf, sizeof(buf), num);
4642  Add(buf);
4643 }
4644 
4645 void VmaStringBuilder::AddPointer(const void* ptr)
4646 {
4647  char buf[21];
4648  VmaPtrToStr(buf, sizeof(buf), ptr);
4649  Add(buf);
4650 }
4651 
4652 #endif // #if VMA_STATS_STRING_ENABLED
4653 
4655 // VmaJsonWriter
4656 
4657 #if VMA_STATS_STRING_ENABLED
4658 
// Streaming JSON writer used for the detailed statistics dump.
// Objects/arrays are tracked on an explicit stack; in an object, values
// alternate key (string) / value, enforced by asserts in BeginValue().
class VmaJsonWriter
{
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    // singleLine suppresses newlines/indentation inside the collection.
    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    // WriteString emits a complete quoted string; the Begin/Continue/End
    // triplet allows composing one string value from multiple pieces.
    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    // Indentation unit, repeated once per nesting level.
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        // In objects, parity of valueCount distinguishes keys from values.
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    // Emits separators/indentation before a value and validates key/value
    // alternation inside objects.
    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};
4706 
// Indentation unit appended once per nesting level by WriteIndent().
const char* const VmaJsonWriter::INDENT = " ";
4708 
// Starts with an empty collection stack, outside of any string.
// Output is appended to the caller-owned string builder `sb`.
VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}
4715 
VmaJsonWriter::~VmaJsonWriter()
{
    // Every string and collection must be closed before destruction,
    // otherwise the emitted JSON would be truncated/invalid.
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}
4721 
4722 void VmaJsonWriter::BeginObject(bool singleLine)
4723 {
4724  VMA_ASSERT(!m_InsideString);
4725 
4726  BeginValue(false);
4727  m_SB.Add('{');
4728 
4729  StackItem item;
4730  item.type = COLLECTION_TYPE_OBJECT;
4731  item.valueCount = 0;
4732  item.singleLineMode = singleLine;
4733  m_Stack.push_back(item);
4734 }
4735 
4736 void VmaJsonWriter::EndObject()
4737 {
4738  VMA_ASSERT(!m_InsideString);
4739 
4740  WriteIndent(true);
4741  m_SB.Add('}');
4742 
4743  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4744  m_Stack.pop_back();
4745 }
4746 
4747 void VmaJsonWriter::BeginArray(bool singleLine)
4748 {
4749  VMA_ASSERT(!m_InsideString);
4750 
4751  BeginValue(false);
4752  m_SB.Add('[');
4753 
4754  StackItem item;
4755  item.type = COLLECTION_TYPE_ARRAY;
4756  item.valueCount = 0;
4757  item.singleLineMode = singleLine;
4758  m_Stack.push_back(item);
4759 }
4760 
4761 void VmaJsonWriter::EndArray()
4762 {
4763  VMA_ASSERT(!m_InsideString);
4764 
4765  WriteIndent(true);
4766  m_SB.Add(']');
4767 
4768  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4769  m_Stack.pop_back();
4770 }
4771 
4772 void VmaJsonWriter::WriteString(const char* pStr)
4773 {
4774  BeginString(pStr);
4775  EndString();
4776 }
4777 
4778 void VmaJsonWriter::BeginString(const char* pStr)
4779 {
4780  VMA_ASSERT(!m_InsideString);
4781 
4782  BeginValue(true);
4783  m_SB.Add('"');
4784  m_InsideString = true;
4785  if(pStr != VMA_NULL && pStr[0] != '\0')
4786  {
4787  ContinueString(pStr);
4788  }
4789 }
4790 
4791 void VmaJsonWriter::ContinueString(const char* pStr)
4792 {
4793  VMA_ASSERT(m_InsideString);
4794 
4795  const size_t strLen = strlen(pStr);
4796  for(size_t i = 0; i < strLen; ++i)
4797  {
4798  char ch = pStr[i];
4799  if(ch == '\'')
4800  {
4801  m_SB.Add("\\\\");
4802  }
4803  else if(ch == '"')
4804  {
4805  m_SB.Add("\\\"");
4806  }
4807  else if(ch >= 32)
4808  {
4809  m_SB.Add(ch);
4810  }
4811  else switch(ch)
4812  {
4813  case '\b':
4814  m_SB.Add("\\b");
4815  break;
4816  case '\f':
4817  m_SB.Add("\\f");
4818  break;
4819  case '\n':
4820  m_SB.Add("\\n");
4821  break;
4822  case '\r':
4823  m_SB.Add("\\r");
4824  break;
4825  case '\t':
4826  m_SB.Add("\\t");
4827  break;
4828  default:
4829  VMA_ASSERT(0 && "Character not currently supported.");
4830  break;
4831  }
4832  }
4833 }
4834 
// Appends a decimal number to the string value currently being written.
void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}
4840 
// Appends a decimal number to the string value currently being written.
void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}
4846 
// Appends a pointer's textual form to the string value currently being written.
void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}
4852 
4853 void VmaJsonWriter::EndString(const char* pStr)
4854 {
4855  VMA_ASSERT(m_InsideString);
4856  if(pStr != VMA_NULL && pStr[0] != '\0')
4857  {
4858  ContinueString(pStr);
4859  }
4860  m_SB.Add('"');
4861  m_InsideString = false;
4862 }
4863 
// Writes a complete numeric JSON value.
void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}
4870 
// Writes a complete numeric JSON value.
void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}
4877 
4878 void VmaJsonWriter::WriteBool(bool b)
4879 {
4880  VMA_ASSERT(!m_InsideString);
4881  BeginValue(false);
4882  m_SB.Add(b ? "true" : "false");
4883 }
4884 
// Writes a complete JSON null value.
void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}
4891 
4892 void VmaJsonWriter::BeginValue(bool isString)
4893 {
4894  if(!m_Stack.empty())
4895  {
4896  StackItem& currItem = m_Stack.back();
4897  if(currItem.type == COLLECTION_TYPE_OBJECT &&
4898  currItem.valueCount % 2 == 0)
4899  {
4900  VMA_ASSERT(isString);
4901  }
4902 
4903  if(currItem.type == COLLECTION_TYPE_OBJECT &&
4904  currItem.valueCount % 2 != 0)
4905  {
4906  m_SB.Add(": ");
4907  }
4908  else if(currItem.valueCount > 0)
4909  {
4910  m_SB.Add(", ");
4911  WriteIndent();
4912  }
4913  else
4914  {
4915  WriteIndent();
4916  }
4917  ++currItem.valueCount;
4918  }
4919 }
4920 
4921 void VmaJsonWriter::WriteIndent(bool oneLess)
4922 {
4923  if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4924  {
4925  m_SB.AddNewLine();
4926 
4927  size_t count = m_Stack.size();
4928  if(count > 0 && oneLess)
4929  {
4930  --count;
4931  }
4932  for(size_t i = 0; i < count; ++i)
4933  {
4934  m_SB.Add(INDENT);
4935  }
4936  }
4937 }
4938 
4939 #endif // #if VMA_STATS_STRING_ENABLED
4940 
4942 
4943 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
4944 {
4945  if(IsUserDataString())
4946  {
4947  VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4948 
4949  FreeUserDataString(hAllocator);
4950 
4951  if(pUserData != VMA_NULL)
4952  {
4953  const char* const newStrSrc = (char*)pUserData;
4954  const size_t newStrLen = strlen(newStrSrc);
4955  char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
4956  memcpy(newStrDst, newStrSrc, newStrLen + 1);
4957  m_pUserData = newStrDst;
4958  }
4959  }
4960  else
4961  {
4962  m_pUserData = pUserData;
4963  }
4964 }
4965 
// Re-points this block-type allocation at (block, offset). Used by the
// defragmentator after moving the allocation's data to another block.
void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
            ++mapRefCount; // persistent mapping holds one extra block-level reference
        // NOTE(review): Map()'s VkResult is ignored here — if mapping the new
        // block fails, the block map counts go out of sync with m_MapCount.
        // Confirm this is acceptable on the defragmentation path.
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}
4987 
4988 VkDeviceSize VmaAllocation_T::GetOffset() const
4989 {
4990  switch(m_Type)
4991  {
4992  case ALLOCATION_TYPE_BLOCK:
4993  return m_BlockAllocation.m_Offset;
4994  case ALLOCATION_TYPE_DEDICATED:
4995  return 0;
4996  default:
4997  VMA_ASSERT(0);
4998  return 0;
4999  }
5000 }
5001 
5002 VkDeviceMemory VmaAllocation_T::GetMemory() const
5003 {
5004  switch(m_Type)
5005  {
5006  case ALLOCATION_TYPE_BLOCK:
5007  return m_BlockAllocation.m_Block->GetDeviceMemory();
5008  case ALLOCATION_TYPE_DEDICATED:
5009  return m_DedicatedAllocation.m_hMemory;
5010  default:
5011  VMA_ASSERT(0);
5012  return VK_NULL_HANDLE;
5013  }
5014 }
5015 
5016 uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
5017 {
5018  switch(m_Type)
5019  {
5020  case ALLOCATION_TYPE_BLOCK:
5021  return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
5022  case ALLOCATION_TYPE_DEDICATED:
5023  return m_DedicatedAllocation.m_MemoryTypeIndex;
5024  default:
5025  VMA_ASSERT(0);
5026  return UINT32_MAX;
5027  }
5028 }
5029 
5030 void* VmaAllocation_T::GetMappedData() const
5031 {
5032  switch(m_Type)
5033  {
5034  case ALLOCATION_TYPE_BLOCK:
5035  if(m_MapCount != 0)
5036  {
5037  void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
5038  VMA_ASSERT(pBlockData != VMA_NULL);
5039  return (char*)pBlockData + m_BlockAllocation.m_Offset;
5040  }
5041  else
5042  {
5043  return VMA_NULL;
5044  }
5045  break;
5046  case ALLOCATION_TYPE_DEDICATED:
5047  VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
5048  return m_DedicatedAllocation.m_pMappedData;
5049  default:
5050  VMA_ASSERT(0);
5051  return VMA_NULL;
5052  }
5053 }
5054 
5055 bool VmaAllocation_T::CanBecomeLost() const
5056 {
5057  switch(m_Type)
5058  {
5059  case ALLOCATION_TYPE_BLOCK:
5060  return m_BlockAllocation.m_CanBecomeLost;
5061  case ALLOCATION_TYPE_DEDICATED:
5062  return false;
5063  default:
5064  VMA_ASSERT(0);
5065  return false;
5066  }
5067 }
5068 
5069 VmaPool VmaAllocation_T::GetPool() const
5070 {
5071  VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5072  return m_BlockAllocation.m_hPool;
5073 }
5074 
// Atomically marks this allocation as lost if it has not been used within
// the last `frameInUseCount` frames. Returns true on success; the caller
// must then unregister the allocation from its VmaDeviceMemoryBlock.
bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you're doing :)
    */
    // Classic compare-exchange retry loop: re-read the frame index whenever
    // another thread changes it between our read and our CAS.
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            // Already lost — caller should not be asking again.
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            // Still potentially in use by the GPU — cannot be lost yet.
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting hAllocation.LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
                return true;
            }
            // CAS failed: localLastUseFrameIndex was refreshed by the
            // compare-exchange; loop and re-evaluate with the new value.
        }
    }
}
5106 
5107 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
5108 {
5109  VMA_ASSERT(IsUserDataString());
5110  if(m_pUserData != VMA_NULL)
5111  {
5112  char* const oldStr = (char*)m_pUserData;
5113  const size_t oldStrLen = strlen(oldStr);
5114  vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
5115  m_pUserData = VMA_NULL;
5116  }
5117 }
5118 
5119 void VmaAllocation_T::BlockAllocMap()
5120 {
5121  VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5122 
5123  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5124  {
5125  ++m_MapCount;
5126  }
5127  else
5128  {
5129  VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
5130  }
5131 }
5132 
5133 void VmaAllocation_T::BlockAllocUnmap()
5134 {
5135  VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5136 
5137  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5138  {
5139  --m_MapCount;
5140  }
5141  else
5142  {
5143  VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
5144  }
5145 }
5146 
5147 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
5148 {
5149  VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5150 
5151  if(m_MapCount != 0)
5152  {
5153  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5154  {
5155  VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
5156  *ppData = m_DedicatedAllocation.m_pMappedData;
5157  ++m_MapCount;
5158  return VK_SUCCESS;
5159  }
5160  else
5161  {
5162  VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
5163  return VK_ERROR_MEMORY_MAP_FAILED;
5164  }
5165  }
5166  else
5167  {
5168  VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5169  hAllocator->m_hDevice,
5170  m_DedicatedAllocation.m_hMemory,
5171  0, // offset
5172  VK_WHOLE_SIZE,
5173  0, // flags
5174  ppData);
5175  if(result == VK_SUCCESS)
5176  {
5177  m_DedicatedAllocation.m_pMappedData = *ppData;
5178  m_MapCount = 1;
5179  }
5180  return result;
5181  }
5182 }
5183 
5184 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
5185 {
5186  VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5187 
5188  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5189  {
5190  --m_MapCount;
5191  if(m_MapCount == 0)
5192  {
5193  m_DedicatedAllocation.m_pMappedData = VMA_NULL;
5194  (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
5195  hAllocator->m_hDevice,
5196  m_DedicatedAllocation.m_hMemory);
5197  }
5198  }
5199  else
5200  {
5201  VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
5202  }
5203 }
5204 
5205 #if VMA_STATS_STRING_ENABLED
5206 
// Correspond to values of enum VmaSuballocationType.
// Indexed directly by the enum value when emitting JSON dumps
// (see PrintDetailedMap), so the order here must match the enum.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};
5216 
// Writes one VmaStatInfo as a JSON object. The Min/Avg/Max sub-objects for
// allocation and unused-range sizes are emitted only when the corresponding
// count exceeds 1. Statement order defines the JSON key order.
static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}
5264 
5265 #endif // #if VMA_STATS_STRING_ENABLED
5266 
// Comparator ordering suballocation-list iterators by ascending suballocation
// size. The second overload compares an iterator against a raw VkDeviceSize,
// enabling binary search of m_FreeSuballocationsBySize by requested size
// (see VmaBinaryFindFirstNotLess call sites).
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
5282 
5284 // class VmaBlockMetadata
5285 
// Constructs empty metadata; the block size and the initial single free
// suballocation are established later by Init(). Both containers allocate
// through the allocator's VkAllocationCallbacks via VmaStlAllocator.
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}
5294 
// Trivial destructor: the member containers release their storage through
// their VmaStlAllocator.
VmaBlockMetadata::~VmaBlockMetadata()
{
}
5298 
5299 void VmaBlockMetadata::Init(VkDeviceSize size)
5300 {
5301  m_Size = size;
5302  m_FreeCount = 1;
5303  m_SumFreeSize = size;
5304 
5305  VmaSuballocation suballoc = {};
5306  suballoc.offset = 0;
5307  suballoc.size = size;
5308  suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5309  suballoc.hAllocation = VK_NULL_HANDLE;
5310 
5311  m_Suballocations.push_back(suballoc);
5312  VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5313  --suballocItem;
5314  m_FreeSuballocationsBySize.push_back(suballocItem);
5315 }
5316 
// Full consistency check of this block's metadata. Returns false on the first
// violated invariant, true when everything is coherent. Intended for use via
// VMA_HEAVY_ASSERT; cost is linear in the number of suballocations.
bool VmaBlockMetadata::Validate() const
{
    if(m_Suballocations.empty())
    {
        return false;
    }

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations as calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations as calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in
    // m_FreeSuballocationsBySize calculated from traversing their list.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        if(subAlloc.offset != calculatedOffset)
        {
            return false;
        }

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        if(prevFree && currFree)
        {
            return false;
        }

        // A free suballocation must have a null allocation handle and a used
        // one must have a valid handle.
        if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
        {
            return false;
        }

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            // Only ranges at least this large are tracked in m_FreeSuballocationsBySize.
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }
        }
        else
        {
            // A used suballocation must agree with its allocation object.
            if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
            {
                return false;
            }
            if(subAlloc.hAllocation->GetSize() != subAlloc.size)
            {
                return false;
            }
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
    // match expected one.
    if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
    {
        return false;
    }

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            return false;
        }
        // They must be sorted by size ascending.
        if(suballocItem->size < lastSize)
        {
            return false;
        }

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    if(!ValidateFreeSuballocationList() ||
        (calculatedOffset != m_Size) ||
        (calculatedSumFreeSize != m_SumFreeSize) ||
        (calculatedFreeCount != m_FreeCount))
    {
        return false;
    }

    return true;
}
5422 
5423 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax() const
5424 {
5425  if(!m_FreeSuballocationsBySize.empty())
5426  {
5427  return m_FreeSuballocationsBySize.back()->size;
5428  }
5429  else
5430  {
5431  return 0;
5432  }
5433 }
5434 
5435 bool VmaBlockMetadata::IsEmpty() const
5436 {
5437  return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
5438 }
5439 
5440 void VmaBlockMetadata::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
5441 {
5442  outInfo.blockCount = 1;
5443 
5444  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5445  outInfo.allocationCount = rangeCount - m_FreeCount;
5446  outInfo.unusedRangeCount = m_FreeCount;
5447 
5448  outInfo.unusedBytes = m_SumFreeSize;
5449  outInfo.usedBytes = m_Size - outInfo.unusedBytes;
5450 
5451  outInfo.allocationSizeMin = UINT64_MAX;
5452  outInfo.allocationSizeMax = 0;
5453  outInfo.unusedRangeSizeMin = UINT64_MAX;
5454  outInfo.unusedRangeSizeMax = 0;
5455 
5456  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5457  suballocItem != m_Suballocations.cend();
5458  ++suballocItem)
5459  {
5460  const VmaSuballocation& suballoc = *suballocItem;
5461  if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
5462  {
5463  outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
5464  outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
5465  }
5466  else
5467  {
5468  outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
5469  outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
5470  }
5471  }
5472 }
5473 
5474 void VmaBlockMetadata::AddPoolStats(VmaPoolStats& inoutStats) const
5475 {
5476  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5477 
5478  inoutStats.size += m_Size;
5479  inoutStats.unusedSize += m_SumFreeSize;
5480  inoutStats.allocationCount += rangeCount - m_FreeCount;
5481  inoutStats.unusedRangeCount += m_FreeCount;
5482  inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
5483 }
5484 
5485 #if VMA_STATS_STRING_ENABLED
5486 
// Emits this block's metadata as a JSON object: byte totals and counts,
// followed by an array of all suballocations with their type, size, offset
// and — for used ones — user data (printed verbatim if it is a string,
// otherwise as a pointer value). Statement order defines the JSON layout.
void VmaBlockMetadata::PrintDetailedMap(class VmaJsonWriter& json) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(m_Size);

    json.WriteString("UnusedBytes");
    json.WriteNumber(m_SumFreeSize);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(m_FreeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        json.BeginObject(true);

        json.WriteString("Type");
        json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);

        json.WriteString("Size");
        json.WriteNumber(suballocItem->size);

        json.WriteString("Offset");
        json.WriteNumber(suballocItem->offset);

        if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            const void* pUserData = suballocItem->hAllocation->GetUserData();
            if(pUserData != VMA_NULL)
            {
                json.WriteString("UserData");
                if(suballocItem->hAllocation->IsUserDataString())
                {
                    json.WriteString((const char*)pUserData);
                }
                else
                {
                    json.BeginString();
                    json.ContinueString_Pointer(pUserData);
                    json.EndString();
                }
            }
        }

        json.EndObject();
    }
    json.EndArray();

    json.EndObject();
}
5546 
5547 #endif // #if VMA_STATS_STRING_ENABLED
5548 
5549 /*
5550 How many suitable free suballocations to analyze before choosing best one.
5551 - Set to 1 to use First-Fit algorithm - first suitable free suballocation will
5552  be chosen.
5553 - Set to UINT32_MAX to use Best-Fit/Worst-Fit algorithm - all suitable free
5554  suballocations will be analyzed and the best one will be chosen.
5555 - Any other value is also acceptable.
5556 */
5557 //static const uint32_t MAX_SUITABLE_SUBALLOCATIONS_TO_CHECK = 8;
5558 
5559 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5560 {
5561  VMA_ASSERT(IsEmpty());
5562  pAllocationRequest->offset = 0;
5563  pAllocationRequest->sumFreeSize = m_SumFreeSize;
5564  pAllocationRequest->sumItemSize = 0;
5565  pAllocationRequest->item = m_Suballocations.begin();
5566  pAllocationRequest->itemsToMakeLostCount = 0;
5567 }
5568 
// Tries to find a place for an allocation of allocSize / allocAlignment /
// allocType inside this block. First searches the size-sorted free list
// (best-fit via binary search when VMA_BEST_FIT, otherwise starting from the
// biggest free range); if that fails and canMakeOtherLost is true, falls back
// to a brute-force scan that may sacrifice existing "lost-able" allocations,
// keeping the candidate with the lowest CalcCost(). Returns true and fills
// *pAllocationRequest on success.
bool VmaBlockMetadata::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    // There is not enough total free space in this block to fulfill the request: Early return.
    if(canMakeOtherLost == false && m_SumFreeSize < allocSize)
    {
        return false;
    }

    // New algorithm, efficiently searching freeSuballocationsBySize.
    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(VMA_BEST_FIT)
        {
            // Find first free suballocation with size not less than allocSize.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            // Candidates are large enough by size, but alignment / granularity
            // may still reject them, so keep scanning upward.
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else
        {
            // Search starting from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.

        // VK_WHOLE_SIZE sentinels make the initial CalcCost() maximal, so any
        // feasible candidate replaces them; they also signal "nothing found".
        pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
        pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;

        VmaAllocationRequest tmpAllocRequest = {};
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    tmpAllocRequest.item = suballocIt;

                    // Keep the cheapest candidate in terms of allocations lost.
                    if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                    }
                }
            }
        }

        // Sentinel overwritten means at least one candidate was accepted.
        if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
        {
            return true;
        }
    }

    return false;
}
5697 
// Executes the "make lost" part of an allocation request produced with
// canMakeOtherLost == true: walks forward from pAllocationRequest->item,
// making the counted allocations lost and freeing their suballocations.
// Returns false if any of them can no longer be made lost; on success the
// request's item points at a free suballocation ready for Alloc().
bool VmaBlockMetadata::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        // Step over a free item to reach the next used one. A single step
        // suffices because adjacent free suballocations are always merged
        // (see the invariant checked in Validate()).
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            // FreeSuballocation may merge with neighbors; continue from the
            // iterator it returns.
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}
5729 
// Makes lost every allocation in this block that can become lost per
// MakeLost(currentFrameIndex, frameInUseCount), freeing its suballocation.
// Returns the number of allocations made lost.
uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            // FreeSuballocation may merge the freed item with its neighbors;
            // resume iteration from the iterator it returns.
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
5747 
// Commits an allocation request: converts the target free suballocation into
// a used one of exactly allocSize bytes at request.offset, splitting any
// leading/trailing padding off as new free suballocations, and updates
// m_FreeCount / m_SumFreeSize. The item must be free and large enough —
// guaranteed by CreateAllocationRequest (+ MakeRequestedAllocationsLost).
void VmaBlockMetadata::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used. Must happen before mutating suballoc below, because
    // unregistration looks the item up by its current size.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert new free suballocation after current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals: one free range consumed, plus one new free range per
    // non-empty padding.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
5811 
5812 void VmaBlockMetadata::Free(const VmaAllocation allocation)
5813 {
5814  for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5815  suballocItem != m_Suballocations.end();
5816  ++suballocItem)
5817  {
5818  VmaSuballocation& suballoc = *suballocItem;
5819  if(suballoc.hAllocation == allocation)
5820  {
5821  FreeSuballocation(suballocItem);
5822  VMA_HEAVY_ASSERT(Validate());
5823  return;
5824  }
5825  }
5826  VMA_ASSERT(0 && "Not found!");
5827 }
5828 
5829 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5830 {
5831  for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5832  suballocItem != m_Suballocations.end();
5833  ++suballocItem)
5834  {
5835  VmaSuballocation& suballoc = *suballocItem;
5836  if(suballoc.offset == offset)
5837  {
5838  FreeSuballocation(suballocItem);
5839  return;
5840  }
5841  }
5842  VMA_ASSERT(0 && "Not found!");
5843 }
5844 
5845 bool VmaBlockMetadata::ValidateFreeSuballocationList() const
5846 {
5847  VkDeviceSize lastSize = 0;
5848  for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5849  {
5850  const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5851 
5852  if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5853  {
5854  VMA_ASSERT(0);
5855  return false;
5856  }
5857  if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5858  {
5859  VMA_ASSERT(0);
5860  return false;
5861  }
5862  if(it->size < lastSize)
5863  {
5864  VMA_ASSERT(0);
5865  return false;
5866  }
5867 
5868  lastSize = it->size;
5869  }
5870  return true;
5871 }
5872 
// Checks whether an allocation of allocSize / allocAlignment / allocType can
// be placed starting at suballocItem. On success returns true and fills
// *pOffset with the final offset after alignment and bufferImageGranularity
// adjustments. With canMakeOtherLost == true, the candidate may span several
// consecutive suballocations whose allocations can be made lost; the cost is
// reported through *itemsToMakeLostCount / *pSumFreeSize / *pSumItemSize.
// With canMakeOtherLost == false, suballocItem itself must be a free range
// big enough for the whole request.
bool VmaBlockMetadata::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        // Account for the starting suballocation: free size if it is free,
        // otherwise it must itself be lost-able and stale.
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(m_Size - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
        *pOffset = VmaAlignUp(*pOffset, alignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that we have final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end if this is not last suballocation.
        VmaSuballocationList::const_iterator next = suballocItem;
        ++next;
        const VkDeviceSize requiredEndMargin =
            (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > m_Size)
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    // A used suballocation in the span must be lost-able and
                    // stale, otherwise the whole candidate fails.
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
        *pOffset = VmaAlignUp(*pOffset, alignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end if this is not last suballocation.
        VmaSuballocationList::const_iterator next = suballocItem;
        ++next;
        const VkDeviceSize requiredEndMargin =
            (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. pOffset is already filled.
    return true;
}
6154 
6155 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
6156 {
6157  VMA_ASSERT(item != m_Suballocations.end());
6158  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6159 
6160  VmaSuballocationList::iterator nextItem = item;
6161  ++nextItem;
6162  VMA_ASSERT(nextItem != m_Suballocations.end());
6163  VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6164 
6165  item->size += nextItem->size;
6166  --m_FreeCount;
6167  m_Suballocations.erase(nextItem);
6168 }
6169 
// Marks the given suballocation as free, updates m_FreeCount / m_SumFreeSize,
// merges it with a free predecessor and/or successor, and registers the
// resulting free range in m_FreeSuballocationsBySize. Returns the iterator of
// the final (possibly merged) free suballocation. Neighbors being merged must
// be unregistered before the merge changes their size, since the registry is
// looked up by size.
VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        // Unregister next before the merge invalidates its iterator.
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        // prev absorbs suballocItem; re-register prev under its new size.
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
6221 
6222 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
6223 {
6224  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6225  VMA_ASSERT(item->size > 0);
6226 
6227  // You may want to enable this validation at the beginning or at the end of
6228  // this function, depending on what do you want to check.
6229  VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6230 
6231  if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6232  {
6233  if(m_FreeSuballocationsBySize.empty())
6234  {
6235  m_FreeSuballocationsBySize.push_back(item);
6236  }
6237  else
6238  {
6239  VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
6240  }
6241  }
6242 
6243  //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6244 }
6245 
6246 
// Removes `item` from the by-size index m_FreeSuballocationsBySize.
// Suballocations below the registration threshold were never inserted, so
// they are skipped. Asserts if a registered item cannot be found.
void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    // You may want to enable this validation at the beginning or at the end of
    // this function, depending on what do you want to check.
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        // Binary-search to the first entry with size >= item->size, then scan
        // linearly through the run of equal-size entries for the exact iterator.
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            // Leaving the equal-size run without a match means the index is corrupt.
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }

    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}
6279 
6281 // class VmaDeviceMemoryBlock
6282 
// Constructs an empty block shell. No VkDeviceMemory is owned until Init()
// is called; until then m_hMemory stays VK_NULL_HANDLE.
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_Metadata(hAllocator),
    m_MemoryTypeIndex(UINT32_MAX),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),          // reference count of outstanding Map() calls
    m_pMappedData(VMA_NULL)
{
}
6291 
// Takes ownership of an already-allocated VkDeviceMemory of `newSize` bytes
// and initializes the suballocation metadata over it. Must be called exactly
// once, before any other use of the block.
void VmaDeviceMemoryBlock::Init(
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize)
{
    // Double Init() would leak the previously owned memory.
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_hMemory = newMemory;

    m_Metadata.Init(newSize);
}
6304 
// Releases the VkDeviceMemory owned by this block back to the allocator.
// The block must be completely empty (all suballocations freed) first.
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // This is the most important assert in the entire library.
    // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
    VMA_ASSERT(m_Metadata.IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;
}
6315 
6316 bool VmaDeviceMemoryBlock::Validate() const
6317 {
6318  if((m_hMemory == VK_NULL_HANDLE) ||
6319  (m_Metadata.GetSize() == 0))
6320  {
6321  return false;
6322  }
6323 
6324  return m_Metadata.Validate();
6325 }
6326 
// Reference-counted mapping of the whole block. The first mapper calls
// vkMapMemory over the entire VkDeviceMemory; later mappers just bump the
// count and receive the cached pointer. `count` is added to the map count.
// Optionally returns the mapped pointer via ppData.
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    // Serializes map/unmap (and bind) on this VkDeviceMemory across threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        // Already mapped: only bump the reference count.
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        // First mapping: map the whole memory object once and cache the pointer.
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}
6365 
// Decrements the block's map reference count by `count`; when it reaches
// zero the underlying VkDeviceMemory is actually unmapped. Asserts on
// unbalanced Unmap (count exceeding the current map count).
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    // Same mutex as Map(): map/unmap on one VkDeviceMemory must not race.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            // Clear the cached pointer before the actual unmap.
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
6388 
// Binds hBuffer to this block's VkDeviceMemory at the allocation's offset.
// The allocation must be a block allocation living in this block.
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkBuffer hBuffer)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
        hAllocator->m_hDevice,
        hBuffer,
        m_hMemory,
        hAllocation->GetOffset());
}
6404 
// Binds hImage to this block's VkDeviceMemory at the allocation's offset.
// Mirror of BindBufferMemory() for images.
VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkImage hImage)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->GetVulkanFunctions().vkBindImageMemory(
        hAllocator->m_hDevice,
        hImage,
        m_hMemory,
        hAllocation->GetOffset());
}
6420 
// Resets outInfo for accumulation: all counters and sums become zero, while
// the minima start at UINT64_MAX so the first VmaAddStatInfo() can lower them.
static void InitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}
6427 
6428 // Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
6429 static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
6430 {
6431  inoutInfo.blockCount += srcInfo.blockCount;
6432  inoutInfo.allocationCount += srcInfo.allocationCount;
6433  inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
6434  inoutInfo.usedBytes += srcInfo.usedBytes;
6435  inoutInfo.unusedBytes += srcInfo.unusedBytes;
6436  inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
6437  inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
6438  inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
6439  inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
6440 }
6441 
// Finalizes accumulated statistics: derives the average allocation and
// unused-range sizes from the summed byte counts, guarding against
// division by zero when no items were counted.
static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
6449 
// Constructs a custom pool: all state lives in the embedded block vector,
// configured straight from createInfo.
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo) :
    m_BlockVector(
        hAllocator,
        createInfo.memoryTypeIndex,
        createInfo.blockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        // Granularity 1 effectively disables buffer/image granularity handling.
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        true) // isCustomPool
{
}
6464 
// Nothing explicit to do: the member m_BlockVector is destroyed
// automatically and releases its blocks in its own destructor.
VmaPool_T::~VmaPool_T()
{
}
6468 
6469 #if VMA_STATS_STRING_ENABLED
6470 
6471 #endif // #if VMA_STATS_STRING_ENABLED
6472 
// Constructs an empty vector of device memory blocks for one memory type.
// Blocks are created lazily (or via CreateMinBlocks()); isCustomPool
// distinguishes user-created pools (fixed block size) from default pools.
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool isCustomPool) :
    m_hAllocator(hAllocator),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_IsCustomPool(isCustomPool),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_HasEmptyBlock(false),
    m_pDefragmentator(VMA_NULL) // created on demand by EnsureDefragmentator()
{
}
6495 
6496 VmaBlockVector::~VmaBlockVector()
6497 {
6498  VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6499 
6500  for(size_t i = m_Blocks.size(); i--; )
6501  {
6502  m_Blocks[i]->Destroy(m_hAllocator);
6503  vma_delete(m_hAllocator, m_Blocks[i]);
6504  }
6505 }
6506 
6507 VkResult VmaBlockVector::CreateMinBlocks()
6508 {
6509  for(size_t i = 0; i < m_MinBlockCount; ++i)
6510  {
6511  VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6512  if(res != VK_SUCCESS)
6513  {
6514  return res;
6515  }
6516  }
6517  return VK_SUCCESS;
6518 }
6519 
6520 void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
6521 {
6522  pStats->size = 0;
6523  pStats->unusedSize = 0;
6524  pStats->allocationCount = 0;
6525  pStats->unusedRangeCount = 0;
6526  pStats->unusedRangeSizeMax = 0;
6527 
6528  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6529 
6530  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6531  {
6532  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
6533  VMA_ASSERT(pBlock);
6534  VMA_HEAVY_ASSERT(pBlock->Validate());
6535  pBlock->m_Metadata.AddPoolStats(*pStats);
6536  }
6537 }
6538 
// Upper bound on retries in VmaBlockVector::Allocate() when allocating with
// "can make other lost" while other threads keep touching the allocations we
// try to evict. Exceeding it yields VK_ERROR_TOO_MANY_OBJECTS.
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
6540 
6541 VkResult VmaBlockVector::Allocate(
6542  VmaPool hCurrentPool,
6543  uint32_t currentFrameIndex,
6544  const VkMemoryRequirements& vkMemReq,
6545  const VmaAllocationCreateInfo& createInfo,
6546  VmaSuballocationType suballocType,
6547  VmaAllocation* pAllocation)
6548 {
6549  const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
6550  const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
6551 
6552  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6553 
6554  // 1. Search existing allocations. Try to allocate without making other allocations lost.
6555  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
6556  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6557  {
6558  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
6559  VMA_ASSERT(pCurrBlock);
6560  VmaAllocationRequest currRequest = {};
6561  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6562  currentFrameIndex,
6563  m_FrameInUseCount,
6564  m_BufferImageGranularity,
6565  vkMemReq.size,
6566  vkMemReq.alignment,
6567  suballocType,
6568  false, // canMakeOtherLost
6569  &currRequest))
6570  {
6571  // Allocate from pCurrBlock.
6572  VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
6573 
6574  if(mapped)
6575  {
6576  VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6577  if(res != VK_SUCCESS)
6578  {
6579  return res;
6580  }
6581  }
6582 
6583  // We no longer have an empty Allocation.
6584  if(pCurrBlock->m_Metadata.IsEmpty())
6585  {
6586  m_HasEmptyBlock = false;
6587  }
6588 
6589  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6590  pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6591  (*pAllocation)->InitBlockAllocation(
6592  hCurrentPool,
6593  pCurrBlock,
6594  currRequest.offset,
6595  vkMemReq.alignment,
6596  vkMemReq.size,
6597  suballocType,
6598  mapped,
6599  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
6600  VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6601  VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
6602  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
6603  return VK_SUCCESS;
6604  }
6605  }
6606 
6607  const bool canCreateNewBlock =
6608  ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
6609  (m_Blocks.size() < m_MaxBlockCount);
6610 
6611  // 2. Try to create new block.
6612  if(canCreateNewBlock)
6613  {
6614  // Calculate optimal size for new block.
6615  VkDeviceSize newBlockSize = m_PreferredBlockSize;
6616  uint32_t newBlockSizeShift = 0;
6617  const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
6618 
6619  // Allocating blocks of other sizes is allowed only in default pools.
6620  // In custom pools block size is fixed.
6621  if(m_IsCustomPool == false)
6622  {
6623  // Allocate 1/8, 1/4, 1/2 as first blocks.
6624  const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6625  for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6626  {
6627  const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6628  if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6629  {
6630  newBlockSize = smallerNewBlockSize;
6631  ++newBlockSizeShift;
6632  }
6633  else
6634  {
6635  break;
6636  }
6637  }
6638  }
6639 
6640  size_t newBlockIndex = 0;
6641  VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
6642  // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
6643  if(m_IsCustomPool == false)
6644  {
6645  while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6646  {
6647  const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6648  if(smallerNewBlockSize >= vkMemReq.size)
6649  {
6650  newBlockSize = smallerNewBlockSize;
6651  ++newBlockSizeShift;
6652  res = CreateBlock(newBlockSize, &newBlockIndex);
6653  }
6654  else
6655  {
6656  break;
6657  }
6658  }
6659  }
6660 
6661  if(res == VK_SUCCESS)
6662  {
6663  VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
6664  VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6665 
6666  if(mapped)
6667  {
6668  res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6669  if(res != VK_SUCCESS)
6670  {
6671  return res;
6672  }
6673  }
6674 
6675  // Allocate from pBlock. Because it is empty, dstAllocRequest can be trivially filled.
6676  VmaAllocationRequest allocRequest;
6677  pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6678  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6679  pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6680  (*pAllocation)->InitBlockAllocation(
6681  hCurrentPool,
6682  pBlock,
6683  allocRequest.offset,
6684  vkMemReq.alignment,
6685  vkMemReq.size,
6686  suballocType,
6687  mapped,
6688  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
6689  VMA_HEAVY_ASSERT(pBlock->Validate());
6690  VMA_DEBUG_LOG(" Created new allocation Size=%llu", allocInfo.allocationSize);
6691  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
6692  return VK_SUCCESS;
6693  }
6694  }
6695 
6696  const bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
6697 
6698  // 3. Try to allocate from existing blocks with making other allocations lost.
6699  if(canMakeOtherLost)
6700  {
6701  uint32_t tryIndex = 0;
6702  for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6703  {
6704  VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6705  VmaAllocationRequest bestRequest = {};
6706  VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
6707 
6708  // 1. Search existing allocations.
6709  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
6710  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6711  {
6712  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
6713  VMA_ASSERT(pCurrBlock);
6714  VmaAllocationRequest currRequest = {};
6715  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6716  currentFrameIndex,
6717  m_FrameInUseCount,
6718  m_BufferImageGranularity,
6719  vkMemReq.size,
6720  vkMemReq.alignment,
6721  suballocType,
6722  canMakeOtherLost,
6723  &currRequest))
6724  {
6725  const VkDeviceSize currRequestCost = currRequest.CalcCost();
6726  if(pBestRequestBlock == VMA_NULL ||
6727  currRequestCost < bestRequestCost)
6728  {
6729  pBestRequestBlock = pCurrBlock;
6730  bestRequest = currRequest;
6731  bestRequestCost = currRequestCost;
6732 
6733  if(bestRequestCost == 0)
6734  {
6735  break;
6736  }
6737  }
6738  }
6739  }
6740 
6741  if(pBestRequestBlock != VMA_NULL)
6742  {
6743  if(mapped)
6744  {
6745  VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6746  if(res != VK_SUCCESS)
6747  {
6748  return res;
6749  }
6750  }
6751 
6752  if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6753  currentFrameIndex,
6754  m_FrameInUseCount,
6755  &bestRequest))
6756  {
6757  // We no longer have an empty Allocation.
6758  if(pBestRequestBlock->m_Metadata.IsEmpty())
6759  {
6760  m_HasEmptyBlock = false;
6761  }
6762  // Allocate from this pBlock.
6763  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6764  pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6765  (*pAllocation)->InitBlockAllocation(
6766  hCurrentPool,
6767  pBestRequestBlock,
6768  bestRequest.offset,
6769  vkMemReq.alignment,
6770  vkMemReq.size,
6771  suballocType,
6772  mapped,
6773  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
6774  VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6775  VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
6776  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
6777  return VK_SUCCESS;
6778  }
6779  // else: Some allocations must have been touched while we are here. Next try.
6780  }
6781  else
6782  {
6783  // Could not find place in any of the blocks - break outer loop.
6784  break;
6785  }
6786  }
6787  /* Maximum number of tries exceeded - a very unlike event when many other
6788  threads are simultaneously touching allocations making it impossible to make
6789  lost at the same time as we try to allocate. */
6790  if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6791  {
6792  return VK_ERROR_TOO_MANY_OBJECTS;
6793  }
6794  }
6795 
6796  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6797 }
6798 
6799 void VmaBlockVector::Free(
6800  VmaAllocation hAllocation)
6801 {
6802  VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6803 
6804  // Scope for lock.
6805  {
6806  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6807 
6808  VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
6809 
6810  if(hAllocation->IsPersistentMap())
6811  {
6812  pBlock->Unmap(m_hAllocator, 1);
6813  }
6814 
6815  pBlock->m_Metadata.Free(hAllocation);
6816  VMA_HEAVY_ASSERT(pBlock->Validate());
6817 
6818  VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", memTypeIndex);
6819 
6820  // pBlock became empty after this deallocation.
6821  if(pBlock->m_Metadata.IsEmpty())
6822  {
6823  // Already has empty Allocation. We don't want to have two, so delete this one.
6824  if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6825  {
6826  pBlockToDelete = pBlock;
6827  Remove(pBlock);
6828  }
6829  // We now have first empty Allocation.
6830  else
6831  {
6832  m_HasEmptyBlock = true;
6833  }
6834  }
6835  // pBlock didn't become empty, but we have another empty block - find and free that one.
6836  // (This is optional, heuristics.)
6837  else if(m_HasEmptyBlock)
6838  {
6839  VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6840  if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6841  {
6842  pBlockToDelete = pLastBlock;
6843  m_Blocks.pop_back();
6844  m_HasEmptyBlock = false;
6845  }
6846  }
6847 
6848  IncrementallySortBlocks();
6849  }
6850 
6851  // Destruction of a free Allocation. Deferred until this point, outside of mutex
6852  // lock, for performance reason.
6853  if(pBlockToDelete != VMA_NULL)
6854  {
6855  VMA_DEBUG_LOG(" Deleted empty allocation");
6856  pBlockToDelete->Destroy(m_hAllocator);
6857  vma_delete(m_hAllocator, pBlockToDelete);
6858  }
6859 }
6860 
6861 size_t VmaBlockVector::CalcMaxBlockSize() const
6862 {
6863  size_t result = 0;
6864  for(size_t i = m_Blocks.size(); i--; )
6865  {
6866  result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
6867  if(result >= m_PreferredBlockSize)
6868  {
6869  break;
6870  }
6871  }
6872  return result;
6873 }
6874 
6875 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6876 {
6877  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6878  {
6879  if(m_Blocks[blockIndex] == pBlock)
6880  {
6881  VmaVectorRemove(m_Blocks, blockIndex);
6882  return;
6883  }
6884  }
6885  VMA_ASSERT(0);
6886 }
6887 
6888 void VmaBlockVector::IncrementallySortBlocks()
6889 {
6890  // Bubble sort only until first swap.
6891  for(size_t i = 1; i < m_Blocks.size(); ++i)
6892  {
6893  if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6894  {
6895  VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
6896  return;
6897  }
6898  }
6899 }
6900 
6901 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
6902 {
6903  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6904  allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6905  allocInfo.allocationSize = blockSize;
6906  VkDeviceMemory mem = VK_NULL_HANDLE;
6907  VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6908  if(res < 0)
6909  {
6910  return res;
6911  }
6912 
6913  // New VkDeviceMemory successfully created.
6914 
6915  // Create new Allocation for it.
6916  VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6917  pBlock->Init(
6918  m_MemoryTypeIndex,
6919  mem,
6920  allocInfo.allocationSize);
6921 
6922  m_Blocks.push_back(pBlock);
6923  if(pNewBlockIndex != VMA_NULL)
6924  {
6925  *pNewBlockIndex = m_Blocks.size() - 1;
6926  }
6927 
6928  return VK_SUCCESS;
6929 }
6930 
6931 #if VMA_STATS_STRING_ENABLED
6932 
// Writes this block vector's configuration and per-block memory maps as one
// JSON object. Custom pools emit their fixed configuration; default pools
// emit only the preferred block size.
void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(m_IsCustomPool)
    {
        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        // Min/Max are only emitted when they constrain anything.
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    // One entry per block, in current (incrementally sorted) order.
    json.WriteString("Blocks");
    json.BeginArray();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
    }
    json.EndArray();

    json.EndObject();
}
6985 
6986 #endif // #if VMA_STATS_STRING_ENABLED
6987 
6988 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6989  VmaAllocator hAllocator,
6990  uint32_t currentFrameIndex)
6991 {
6992  if(m_pDefragmentator == VMA_NULL)
6993  {
6994  m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6995  hAllocator,
6996  this,
6997  currentFrameIndex);
6998  }
6999 
7000  return m_pDefragmentator;
7001 }
7002 
// Runs defragmentation on this block vector within the given move budgets,
// accumulates the moved bytes/allocations into *pDefragmentationStats and
// decrements the budgets accordingly, then frees blocks that became empty
// (down to m_MinBlockCount). No-op when no defragmentator was requested.
VkResult VmaBlockVector::Defragment(
    VmaDefragmentationStats* pDefragmentationStats,
    VkDeviceSize& maxBytesToMove,
    uint32_t& maxAllocationsToMove)
{
    if(m_pDefragmentator == VMA_NULL)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    // Defragment.
    VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);

    // Accumulate statistics.
    if(pDefragmentationStats != VMA_NULL)
    {
        const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
        const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
        pDefragmentationStats->bytesMoved += bytesMoved;
        pDefragmentationStats->allocationsMoved += allocationsMoved;
        // The defragmentator must respect the budgets it was given.
        VMA_ASSERT(bytesMoved <= maxBytesToMove);
        VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
        // The remaining budget is shared across block vectors by the caller.
        maxBytesToMove -= bytesMoved;
        maxAllocationsToMove -= allocationsMoved;
    }

    // Free empty blocks. Iterate backwards so VmaVectorRemove does not
    // disturb indices not yet visited.
    m_HasEmptyBlock = false;
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_Metadata.IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_Metadata.GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                // Must keep the block to honor m_MinBlockCount; remember it is empty.
                m_HasEmptyBlock = true;
            }
        }
    }

    return result;
}
7059 
7060 void VmaBlockVector::DestroyDefragmentator()
7061 {
7062  if(m_pDefragmentator != VMA_NULL)
7063  {
7064  vma_delete(m_hAllocator, m_pDefragmentator);
7065  m_pDefragmentator = VMA_NULL;
7066  }
7067 }
7068 
7069 void VmaBlockVector::MakePoolAllocationsLost(
7070  uint32_t currentFrameIndex,
7071  size_t* pLostAllocationCount)
7072 {
7073  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7074  size_t lostAllocationCount = 0;
7075  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7076  {
7077  VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
7078  VMA_ASSERT(pBlock);
7079  lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
7080  }
7081  if(pLostAllocationCount != VMA_NULL)
7082  {
7083  *pLostAllocationCount = lostAllocationCount;
7084  }
7085 }
7086 
// Accumulates per-block statistics of this vector into the global totals,
// the per-memory-type bucket, and the per-heap bucket of *pStats.
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
        // One block contributes to three aggregation levels.
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
7106 
7108 // VmaDefragmentator members definition
7109 
// Constructs a defragmentator bound to one block vector. Allocations to
// consider are added later via AddAllocation(); move counters start at zero.
VmaDefragmentator::VmaDefragmentator(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex) :
    m_hAllocator(hAllocator),
    m_pBlockVector(pBlockVector),
    m_CurrentFrameIndex(currentFrameIndex),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
}
7123 
7124 VmaDefragmentator::~VmaDefragmentator()
7125 {
7126  for(size_t i = m_Blocks.size(); i--; )
7127  {
7128  vma_delete(m_hAllocator, m_Blocks[i]);
7129  }
7130 }
7131 
7132 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
7133 {
7134  AllocationInfo allocInfo;
7135  allocInfo.m_hAllocation = hAlloc;
7136  allocInfo.m_pChanged = pChanged;
7137  m_Allocations.push_back(allocInfo);
7138 }
7139 
// Returns (via *ppMappedData) a CPU pointer to this block's memory, mapping
// it on first use. Reuses an existing persistent mapping when present;
// otherwise creates a defragmentation-scoped mapping released by Unmap().
VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator, void** ppMappedData)
{
    // It has already been mapped for defragmentation.
    if(m_pMappedDataForDefragmentation)
    {
        *ppMappedData = m_pMappedDataForDefragmentation;
        return VK_SUCCESS;
    }

    // It is originally mapped.
    if(m_pBlock->GetMappedData())
    {
        *ppMappedData = m_pBlock->GetMappedData();
        return VK_SUCCESS;
    }

    // Map on first usage.
    VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
    *ppMappedData = m_pMappedDataForDefragmentation;
    return res;
}
7161 
// Releases the mapping created by EnsureMapping(), if this BlockInfo created
// one. Mappings that pre-existed on the block are left untouched.
// NOTE(review): m_pMappedDataForDefragmentation is not reset to VMA_NULL
// here — presumably safe because the BlockInfo is not reused after Unmap();
// verify against the defragmentation flow before relying on it.
void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
{
    if(m_pMappedDataForDefragmentation != VMA_NULL)
    {
        m_pBlock->Unmap(hAllocator, 1);
    }
}
7169 
// Performs one round of defragmentation: walks allocations from the most
// "source" block toward the most "destination" block, moving each movable
// allocation into the earliest block that can hold it. Returns VK_SUCCESS
// when no more moves are possible, or VK_INCOMPLETE when a budget
// (maxBytesToMove / maxAllocationsToMove) would be exceeded.
VkResult VmaDefragmentator::DefragmentRound(
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    // srcAllocIndex == SIZE_MAX means "not yet positioned within this block";
    // the loop below then snaps it to the block's last allocation.
    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations - they are sorted from largest to smallest.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == 0)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                suballocType,
                false, // canMakeOtherLost
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_INCOMPLETE;
                }

                // Both blocks must be CPU-visible to copy the payload.
                void* pDstMappedData = VMA_NULL;
                VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
                if(res != VK_SUCCESS)
                {
                    return res;
                }

                void* pSrcMappedData = VMA_NULL;
                res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
                if(res != VK_SUCCESS)
                {
                    return res;
                }

                // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
                memcpy(
                    reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
                    reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
                    static_cast<size_t>(size));

                // Commit the move in metadata, then repoint the allocation handle.
                pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
                pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);

                allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pBlockInfo->m_Allocations for next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
7300 
// Runs the whole defragmentation for this block vector: groups the registered
// allocations by block, orders blocks and allocations, then executes up to two
// rounds of DefragmentRound() within the given byte/count limits.
// Returns VK_SUCCESS, VK_INCOMPLETE (a limit was reached), or an error code
// propagated from mapping memory inside a round.
VkResult VmaDefragmentator::Defragment(
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    // Nothing registered to move - trivially done.
    if(m_Allocations.empty())
    {
        return VK_SUCCESS;
    }

    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value, so the binary search below can find them.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());

    // Move allocation infos from m_Allocations to appropriate m_Blocks[i]->m_Allocations.
    for(size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
    {
        AllocationInfo& allocInfo = m_Allocations[blockIndex];
        // Now as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
        if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
        {
            VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
            BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
            if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
            {
                (*it)->m_Allocations.push_back(allocInfo);
            }
            else
            {
                // The allocation's block is not part of this vector - should never happen.
                VMA_ASSERT(0);
            }
        }
    }
    m_Allocations.clear();

    // Precompute per-block data used by the sort and by DefragmentRound().
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];
        pBlockInfo->CalcHasNonMovableAllocations();
        pBlockInfo->SortAllocationsBySizeDescecnding();
    }

    // Sort m_Blocks this time by the main criterium, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // Execute defragmentation rounds (the main part).
    // At most 2 rounds run; a round returning VK_INCOMPLETE or an error stops the loop.
    VkResult result = VK_SUCCESS;
    for(size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
    }

    // Unmap blocks that were mapped for defragmentation.
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        m_Blocks[blockIndex]->Unmap(m_hAllocator);
    }

    return result;
}
7368 
7369 bool VmaDefragmentator::MoveMakesSense(
7370  size_t dstBlockIndex, VkDeviceSize dstOffset,
7371  size_t srcBlockIndex, VkDeviceSize srcOffset)
7372 {
7373  if(dstBlockIndex < srcBlockIndex)
7374  {
7375  return true;
7376  }
7377  if(dstBlockIndex > srcBlockIndex)
7378  {
7379  return false;
7380  }
7381  if(dstOffset < srcOffset)
7382  {
7383  return true;
7384  }
7385  return false;
7386 }
7387 
7389 // VmaAllocator_T
7390 
7391 VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
7392  m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
7393  m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
7394  m_hDevice(pCreateInfo->device),
7395  m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
7396  m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7397  *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7398  m_PreferredLargeHeapBlockSize(0),
7399  m_PhysicalDevice(pCreateInfo->physicalDevice),
7400  m_CurrentFrameIndex(0),
7401  m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
7402 {
7403  VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);
7404 
7405 #if !(VMA_DEDICATED_ALLOCATION)
7407  {
7408  VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
7409  }
7410 #endif
7411 
7412  memset(&m_DeviceMemoryCallbacks, 0 ,sizeof(m_DeviceMemoryCallbacks));
7413  memset(&m_MemProps, 0, sizeof(m_MemProps));
7414  memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
7415 
7416  memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
7417  memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
7418 
7419  for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7420  {
7421  m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
7422  }
7423 
7424  if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
7425  {
7426  m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
7427  m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
7428  }
7429 
7430  ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);
7431 
7432  (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7433  (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
7434 
7435  m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
7436  pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
7437 
7438  if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
7439  {
7440  for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7441  {
7442  const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
7443  if(limit != VK_WHOLE_SIZE)
7444  {
7445  m_HeapSizeLimit[heapIndex] = limit;
7446  if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7447  {
7448  m_MemProps.memoryHeaps[heapIndex].size = limit;
7449  }
7450  }
7451  }
7452  }
7453 
7454  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7455  {
7456  const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7457 
7458  m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
7459  this,
7460  memTypeIndex,
7461  preferredBlockSize,
7462  0,
7463  SIZE_MAX,
7464  GetBufferImageGranularity(),
7465  pCreateInfo->frameInUseCount,
7466  false); // isCustomPool
7467  // No need to call m_pBlockVectors[memTypeIndex][blockVectorTypeIndex]->CreateMinBlocks here,
7468  // becase minBlockCount is 0.
7469  m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
7470  }
7471 }
7472 
7473 VmaAllocator_T::~VmaAllocator_T()
7474 {
7475  VMA_ASSERT(m_Pools.empty());
7476 
7477  for(size_t i = GetMemoryTypeCount(); i--; )
7478  {
7479  vma_delete(this, m_pDedicatedAllocations[i]);
7480  vma_delete(this, m_pBlockVectors[i]);
7481  }
7482 }
7483 
// Fills m_VulkanFunctions with the Vulkan entry points the allocator needs.
// Sources, in order: statically linked functions (when
// VMA_STATIC_VULKAN_FUNCTIONS == 1), then any non-null members of the
// optional user-provided pVulkanFunctions, which override the static ones.
// Asserts at the end that every required pointer is non-null.
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = &vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
    m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = &vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
#if VMA_DEDICATED_ALLOCATION
    // Extension functions are not exported statically - fetch them by name.
    if(m_UseKhrDedicatedAllocation)
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    }
#endif // #if VMA_DEDICATED_ALLOCATION
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

// Copies one function pointer from the user struct only when it is set,
// so user-provided pointers override but never erase the static ones.
#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
#if VMA_DEDICATED_ALLOCATION
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif
    }

#undef VMA_COPY_IF_NOT_NULL

    // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif
}
7563 
7564 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7565 {
7566  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7567  const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7568  const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7569  return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
7570 }
7571 
7572 VkResult VmaAllocator_T::AllocateMemoryOfType(
7573  const VkMemoryRequirements& vkMemReq,
7574  bool dedicatedAllocation,
7575  VkBuffer dedicatedBuffer,
7576  VkImage dedicatedImage,
7577  const VmaAllocationCreateInfo& createInfo,
7578  uint32_t memTypeIndex,
7579  VmaSuballocationType suballocType,
7580  VmaAllocation* pAllocation)
7581 {
7582  VMA_ASSERT(pAllocation != VMA_NULL);
7583  VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
7584 
7585  VmaAllocationCreateInfo finalCreateInfo = createInfo;
7586 
7587  // If memory type is not HOST_VISIBLE, disable MAPPED.
7588  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
7589  (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7590  {
7591  finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
7592  }
7593 
7594  VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
7595  VMA_ASSERT(blockVector);
7596 
7597  const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7598  bool preferDedicatedMemory =
7599  VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7600  dedicatedAllocation ||
7601  // Heuristics: Allocate dedicated memory if requested size if greater than half of preferred block size.
7602  vkMemReq.size > preferredBlockSize / 2;
7603 
7604  if(preferDedicatedMemory &&
7605  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
7606  finalCreateInfo.pool == VK_NULL_HANDLE)
7607  {
7609  }
7610 
7611  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
7612  {
7613  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
7614  {
7615  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7616  }
7617  else
7618  {
7619  return AllocateDedicatedMemory(
7620  vkMemReq.size,
7621  suballocType,
7622  memTypeIndex,
7623  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
7624  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
7625  finalCreateInfo.pUserData,
7626  dedicatedBuffer,
7627  dedicatedImage,
7628  pAllocation);
7629  }
7630  }
7631  else
7632  {
7633  VkResult res = blockVector->Allocate(
7634  VK_NULL_HANDLE, // hCurrentPool
7635  m_CurrentFrameIndex.load(),
7636  vkMemReq,
7637  finalCreateInfo,
7638  suballocType,
7639  pAllocation);
7640  if(res == VK_SUCCESS)
7641  {
7642  return res;
7643  }
7644 
7645  // 5. Try dedicated memory.
7646  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
7647  {
7648  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7649  }
7650  else
7651  {
7652  res = AllocateDedicatedMemory(
7653  vkMemReq.size,
7654  suballocType,
7655  memTypeIndex,
7656  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
7657  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
7658  finalCreateInfo.pUserData,
7659  dedicatedBuffer,
7660  dedicatedImage,
7661  pAllocation);
7662  if(res == VK_SUCCESS)
7663  {
7664  // Succeeded: AllocateDedicatedMemory function already filld pMemory, nothing more to do here.
7665  VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
7666  return VK_SUCCESS;
7667  }
7668  else
7669  {
7670  // Everything failed: Return error code.
7671  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
7672  return res;
7673  }
7674  }
7675  }
7676 }
7677 
// Allocates a whole dedicated VkDeviceMemory object for a single allocation,
// optionally chaining VkMemoryDedicatedAllocateInfoKHR for the given buffer
// or image, optionally mapping it persistently, and registers the result in
// m_pDedicatedAllocations[memTypeIndex]. On mapping failure the freshly
// allocated memory is released before returning the error.
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(pAllocation);

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION
    // Chain dedicated-allocation info when the extension is in use.
    // At most one of dedicatedBuffer/dedicatedImage may be set.
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation)
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION

    // Allocate VkDeviceMemory.
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    // Optionally create a persistent mapping for the whole range.
    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            // Mapping failed - release the memory allocated above before bailing out.
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);

    // Register it in m_pDedicatedAllocations.
    {
        VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
    }

    VMA_DEBUG_LOG("    Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);

    return VK_SUCCESS;
}
7756 
// Queries memory requirements for hBuffer. When VK_KHR_dedicated_allocation
// is enabled, uses vkGetBufferMemoryRequirements2KHR and reports whether a
// dedicated allocation is required/preferred; otherwise falls back to the
// core query and reports both flags as false.
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        // Chain the dedicated-requirements struct so the driver fills it.
        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation  = (memDedicatedReq.prefersDedicatedAllocation  != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation  = false;
    }
}
7788 
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with VkMemoryDedicatedRequirementsKHR when
// the dedicated-allocation extension is enabled, else the core query with
// both dedicated flags reported as false.
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        // Chain the dedicated-requirements struct so the driver fills it.
        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation  = (memDedicatedReq.prefersDedicatedAllocation  != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation  = false;
    }
}
7820 
7821 VkResult VmaAllocator_T::AllocateMemory(
7822  const VkMemoryRequirements& vkMemReq,
7823  bool requiresDedicatedAllocation,
7824  bool prefersDedicatedAllocation,
7825  VkBuffer dedicatedBuffer,
7826  VkImage dedicatedImage,
7827  const VmaAllocationCreateInfo& createInfo,
7828  VmaSuballocationType suballocType,
7829  VmaAllocation* pAllocation)
7830 {
7831  if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
7832  (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
7833  {
7834  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7835  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7836  }
7837  if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
7839  {
7840  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7841  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7842  }
7843  if(requiresDedicatedAllocation)
7844  {
7845  if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
7846  {
7847  VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7848  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7849  }
7850  if(createInfo.pool != VK_NULL_HANDLE)
7851  {
7852  VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
7853  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7854  }
7855  }
7856  if((createInfo.pool != VK_NULL_HANDLE) &&
7857  ((createInfo.flags & (VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT)) != 0))
7858  {
7859  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7860  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7861  }
7862 
7863  if(createInfo.pool != VK_NULL_HANDLE)
7864  {
7865  return createInfo.pool->m_BlockVector.Allocate(
7866  createInfo.pool,
7867  m_CurrentFrameIndex.load(),
7868  vkMemReq,
7869  createInfo,
7870  suballocType,
7871  pAllocation);
7872  }
7873  else
7874  {
7875  // Bit mask of memory Vulkan types acceptable for this allocation.
7876  uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7877  uint32_t memTypeIndex = UINT32_MAX;
7878  VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
7879  if(res == VK_SUCCESS)
7880  {
7881  res = AllocateMemoryOfType(
7882  vkMemReq,
7883  requiresDedicatedAllocation || prefersDedicatedAllocation,
7884  dedicatedBuffer,
7885  dedicatedImage,
7886  createInfo,
7887  memTypeIndex,
7888  suballocType,
7889  pAllocation);
7890  // Succeeded on first try.
7891  if(res == VK_SUCCESS)
7892  {
7893  return res;
7894  }
7895  // Allocation from this memory type failed. Try other compatible memory types.
7896  else
7897  {
7898  for(;;)
7899  {
7900  // Remove old memTypeIndex from list of possibilities.
7901  memoryTypeBits &= ~(1u << memTypeIndex);
7902  // Find alternative memTypeIndex.
7903  res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
7904  if(res == VK_SUCCESS)
7905  {
7906  res = AllocateMemoryOfType(
7907  vkMemReq,
7908  requiresDedicatedAllocation || prefersDedicatedAllocation,
7909  dedicatedBuffer,
7910  dedicatedImage,
7911  createInfo,
7912  memTypeIndex,
7913  suballocType,
7914  pAllocation);
7915  // Allocation from this alternative memory type succeeded.
7916  if(res == VK_SUCCESS)
7917  {
7918  return res;
7919  }
7920  // else: Allocation from this memory type failed. Try next one - next loop iteration.
7921  }
7922  // No other matching memory type index could be found.
7923  else
7924  {
7925  // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
7926  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7927  }
7928  }
7929  }
7930  }
7931  // Can't find any single memory type maching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT.
7932  else
7933  return res;
7934  }
7935 }
7936 
7937 void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
7938 {
7939  VMA_ASSERT(allocation);
7940 
7941  if(allocation->CanBecomeLost() == false ||
7942  allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7943  {
7944  switch(allocation->GetType())
7945  {
7946  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7947  {
7948  VmaBlockVector* pBlockVector = VMA_NULL;
7949  VmaPool hPool = allocation->GetPool();
7950  if(hPool != VK_NULL_HANDLE)
7951  {
7952  pBlockVector = &hPool->m_BlockVector;
7953  }
7954  else
7955  {
7956  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7957  pBlockVector = m_pBlockVectors[memTypeIndex];
7958  }
7959  pBlockVector->Free(allocation);
7960  }
7961  break;
7962  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7963  FreeDedicatedMemory(allocation);
7964  break;
7965  default:
7966  VMA_ASSERT(0);
7967  }
7968  }
7969 
7970  allocation->SetUserData(this, VMA_NULL);
7971  vma_delete(this, allocation);
7972 }
7973 
7974 void VmaAllocator_T::CalculateStats(VmaStats* pStats)
7975 {
7976  // Initialize.
7977  InitStatInfo(pStats->total);
7978  for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7979  InitStatInfo(pStats->memoryType[i]);
7980  for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7981  InitStatInfo(pStats->memoryHeap[i]);
7982 
7983  // Process default pools.
7984  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7985  {
7986  VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
7987  VMA_ASSERT(pBlockVector);
7988  pBlockVector->AddStats(pStats);
7989  }
7990 
7991  // Process custom pools.
7992  {
7993  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7994  for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7995  {
7996  m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
7997  }
7998  }
7999 
8000  // Process dedicated allocations.
8001  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8002  {
8003  const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
8004  VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8005  AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8006  VMA_ASSERT(pDedicatedAllocVector);
8007  for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
8008  {
8009  VmaStatInfo allocationStatInfo;
8010  (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
8011  VmaAddStatInfo(pStats->total, allocationStatInfo);
8012  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
8013  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
8014  }
8015  }
8016 
8017  // Postprocess.
8018  VmaPostprocessCalcStatInfo(pStats->total);
8019  for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
8020  VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
8021  for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
8022  VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
8023 }
8024 
// PCI vendor ID of AMD: 4098 == 0x1002.
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
8026 
8027 VkResult VmaAllocator_T::Defragment(
8028  VmaAllocation* pAllocations,
8029  size_t allocationCount,
8030  VkBool32* pAllocationsChanged,
8031  const VmaDefragmentationInfo* pDefragmentationInfo,
8032  VmaDefragmentationStats* pDefragmentationStats)
8033 {
8034  if(pAllocationsChanged != VMA_NULL)
8035  {
8036  memset(pAllocationsChanged, 0, sizeof(*pAllocationsChanged));
8037  }
8038  if(pDefragmentationStats != VMA_NULL)
8039  {
8040  memset(pDefragmentationStats, 0, sizeof(*pDefragmentationStats));
8041  }
8042 
8043  const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
8044 
8045  VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
8046 
8047  const size_t poolCount = m_Pools.size();
8048 
8049  // Dispatch pAllocations among defragmentators. Create them in BlockVectors when necessary.
8050  for(size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
8051  {
8052  VmaAllocation hAlloc = pAllocations[allocIndex];
8053  VMA_ASSERT(hAlloc);
8054  const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
8055  // DedicatedAlloc cannot be defragmented.
8056  if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
8057  // Only HOST_VISIBLE memory types can be defragmented.
8058  ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
8059  // Lost allocation cannot be defragmented.
8060  (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
8061  {
8062  VmaBlockVector* pAllocBlockVector = VMA_NULL;
8063 
8064  const VmaPool hAllocPool = hAlloc->GetPool();
8065  // This allocation belongs to custom pool.
8066  if(hAllocPool != VK_NULL_HANDLE)
8067  {
8068  pAllocBlockVector = &hAllocPool->GetBlockVector();
8069  }
8070  // This allocation belongs to general pool.
8071  else
8072  {
8073  pAllocBlockVector = m_pBlockVectors[memTypeIndex];
8074  }
8075 
8076  VmaDefragmentator* const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);
8077 
8078  VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
8079  &pAllocationsChanged[allocIndex] : VMA_NULL;
8080  pDefragmentator->AddAllocation(hAlloc, pChanged);
8081  }
8082  }
8083 
8084  VkResult result = VK_SUCCESS;
8085 
8086  // ======== Main processing.
8087 
8088  VkDeviceSize maxBytesToMove = SIZE_MAX;
8089  uint32_t maxAllocationsToMove = UINT32_MAX;
8090  if(pDefragmentationInfo != VMA_NULL)
8091  {
8092  maxBytesToMove = pDefragmentationInfo->maxBytesToMove;
8093  maxAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
8094  }
8095 
8096  // Process standard memory.
8097  for(uint32_t memTypeIndex = 0;
8098  (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
8099  ++memTypeIndex)
8100  {
8101  // Only HOST_VISIBLE memory types can be defragmented.
8102  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8103  {
8104  result = m_pBlockVectors[memTypeIndex]->Defragment(
8105  pDefragmentationStats,
8106  maxBytesToMove,
8107  maxAllocationsToMove);
8108  }
8109  }
8110 
8111  // Process custom pools.
8112  for(size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
8113  {
8114  result = m_Pools[poolIndex]->GetBlockVector().Defragment(
8115  pDefragmentationStats,
8116  maxBytesToMove,
8117  maxAllocationsToMove);
8118  }
8119 
8120  // ======== Destroy defragmentators.
8121 
8122  // Process custom pools.
8123  for(size_t poolIndex = poolCount; poolIndex--; )
8124  {
8125  m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
8126  }
8127 
8128  // Process standard memory.
8129  for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
8130  {
8131  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8132  {
8133  m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
8134  }
8135  }
8136 
8137  return result;
8138 }
8139 
// Fills *pAllocationInfo with the current state of hAllocation.
// For allocations created with "can become lost" semantics this also acts as
// a touch: it atomically advances the allocation's last-use frame index to the
// current frame, unless the allocation has already been marked lost.
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        /*
        Warning: This is a carefully designed algorithm.
        Do not modify unless you really know what you're doing :)
        */
        // Lock-free loop: keep attempting a compare-exchange until the
        // last-use frame index is observed as either LOST or equal to the
        // current frame index.
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                // Allocation is lost: report placeholder memory info.
                // Size and user data are still meaningful.
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                // Allocation already touched in this frame: report live info.
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                // Try to bump the last-use index to the current frame; on
                // CAS failure localLastUseFrameIndex is reloaded by the
                // compare-exchange and the loop re-evaluates.
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
        // Non-lost-capable allocation: report everything directly,
        // including any persistently mapped pointer.
        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
8191 
// Touches hAllocation: atomically advances its last-use frame index to the
// current frame. Returns false if the allocation is lost, true otherwise.
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        // Same lock-free CAS loop as GetAllocationInfo, minus the info copy.
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                // On CAS failure the exchanged-out value is reloaded and the
                // loop re-evaluates against the refreshed index.
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
        // Allocations that cannot be lost are always valid.
        return true;
    }
}
8223 
8224 VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
8225 {
8226  VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u", pCreateInfo->memoryTypeIndex);
8227 
8228  VmaPoolCreateInfo newCreateInfo = *pCreateInfo;
8229 
8230  if(newCreateInfo.maxBlockCount == 0)
8231  {
8232  newCreateInfo.maxBlockCount = SIZE_MAX;
8233  }
8234  if(newCreateInfo.blockSize == 0)
8235  {
8236  newCreateInfo.blockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
8237  }
8238 
8239  *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo);
8240 
8241  VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
8242  if(res != VK_SUCCESS)
8243  {
8244  vma_delete(this, *pPool);
8245  *pPool = VMA_NULL;
8246  return res;
8247  }
8248 
8249  // Add to m_Pools.
8250  {
8251  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8252  VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
8253  }
8254 
8255  return VK_SUCCESS;
8256 }
8257 
8258 void VmaAllocator_T::DestroyPool(VmaPool pool)
8259 {
8260  // Remove from m_Pools.
8261  {
8262  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8263  bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
8264  VMA_ASSERT(success && "Pool not found in Allocator.");
8265  }
8266 
8267  vma_delete(this, pool);
8268 }
8269 
8270 void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
8271 {
8272  pool->m_BlockVector.GetPoolStats(pPoolStats);
8273 }
8274 
8275 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
8276 {
8277  m_CurrentFrameIndex.store(frameIndex);
8278 }
8279 
8280 void VmaAllocator_T::MakePoolAllocationsLost(
8281  VmaPool hPool,
8282  size_t* pLostAllocationCount)
8283 {
8284  hPool->m_BlockVector.MakePoolAllocationsLost(
8285  m_CurrentFrameIndex.load(),
8286  pLostAllocationCount);
8287 }
8288 
8289 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
8290 {
8291  *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
8292  (*pAllocation)->InitLost();
8293 }
8294 
// Allocates raw VkDeviceMemory, enforcing the optional per-heap size limit
// and invoking the user's allocate callback on success.
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    VkResult res;
    // VK_WHOLE_SIZE in m_HeapSizeLimit means "no limit configured for this heap".
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        // The budget check and its decrement must be one atomic step, so the
        // mutex covers both the test and the vkAllocateMemory call.
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
        {
            res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
            if(res == VK_SUCCESS)
            {
                // Charge the allocation against the remaining heap budget.
                m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
            }
        }
        else
        {
            // Budget exhausted: simulate device-out-of-memory.
            res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    else
    {
        res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    }

    // Notify the user callback only for successful allocations.
    if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
    }

    return res;
}
8328 
// Frees raw VkDeviceMemory: notifies the user's free callback first (while
// the handle is still valid), frees the memory, then returns the freed size
// to the per-heap budget when a limit is configured.
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Callback runs before the memory actually disappears.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
    // VK_WHOLE_SIZE means no budget tracking for this heap.
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        m_HeapSizeLimit[heapIndex] += size; // Give the budget back.
    }
}
8345 
// Maps the memory backing hAllocation and returns a CPU pointer to the
// allocation's own bytes in *ppData. Mapping is reference-counted; each
// successful Map() must be paired with an Unmap().
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    // Allocations that can become lost cannot be mapped safely.
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char *pBytes = VMA_NULL;
            // Map the whole block (ref-counted inside the block), then
            // offset the returned pointer to this suballocation.
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap(); // Track per-allocation map count.
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}
8374 
8375 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
8376 {
8377  switch(hAllocation->GetType())
8378  {
8379  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8380  {
8381  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
8382  hAllocation->BlockAllocUnmap();
8383  pBlock->Unmap(this, 1);
8384  }
8385  break;
8386  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8387  hAllocation->DedicatedAllocUnmap(this);
8388  break;
8389  default:
8390  VMA_ASSERT(0);
8391  }
8392 }
8393 
8394 VkResult VmaAllocator_T::BindBufferMemory(VmaAllocation hAllocation, VkBuffer hBuffer)
8395 {
8396  VkResult res = VK_SUCCESS;
8397  switch(hAllocation->GetType())
8398  {
8399  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8400  res = GetVulkanFunctions().vkBindBufferMemory(
8401  m_hDevice,
8402  hBuffer,
8403  hAllocation->GetMemory(),
8404  0); //memoryOffset
8405  break;
8406  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8407  {
8408  VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
8409  VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
8410  res = pBlock->BindBufferMemory(this, hAllocation, hBuffer);
8411  break;
8412  }
8413  default:
8414  VMA_ASSERT(0);
8415  }
8416  return res;
8417 }
8418 
8419 VkResult VmaAllocator_T::BindImageMemory(VmaAllocation hAllocation, VkImage hImage)
8420 {
8421  VkResult res = VK_SUCCESS;
8422  switch(hAllocation->GetType())
8423  {
8424  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8425  res = GetVulkanFunctions().vkBindImageMemory(
8426  m_hDevice,
8427  hImage,
8428  hAllocation->GetMemory(),
8429  0); //memoryOffset
8430  break;
8431  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8432  {
8433  VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
8434  VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
8435  res = pBlock->BindImageMemory(this, hAllocation, hImage);
8436  break;
8437  }
8438  default:
8439  VMA_ASSERT(0);
8440  }
8441  return res;
8442 }
8443 
// Frees a dedicated (non-block) allocation: unregisters it from the
// per-memory-type dedicated-allocation list, unmaps it if it left the
// memory mapped, then releases its VkDeviceMemory.
void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        // Lock scope covers only the list removal; the actual free happens
        // outside the lock.
        VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    // Unmap before freeing if the allocation is still mapped.
    if(allocation->GetMappedData() != VMA_NULL)
    {
        (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
    }

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
8468 
8469 #if VMA_STATS_STRING_ENABLED
8470 
// Writes a detailed JSON map of all memory owned by the allocator into json:
// dedicated allocations per memory type, default block vectors per memory
// type, and custom pools. Assumes the caller has already opened the
// surrounding JSON object.
void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    // --- Section 1: dedicated allocations, grouped by memory type. ---
    // The "DedicatedAllocations" object is opened lazily, only if at least
    // one memory type has a non-empty dedicated list.
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                json.BeginObject(true);

                json.WriteString("Type");
                json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);

                json.WriteString("Size");
                json.WriteNumber(hAlloc->GetSize());

                const void* pUserData = hAlloc->GetUserData();
                if(pUserData != VMA_NULL)
                {
                    json.WriteString("UserData");
                    // User data is either an owned string or an opaque
                    // pointer printed as hex.
                    if(hAlloc->IsUserDataString())
                    {
                        json.WriteString((const char*)pUserData);
                    }
                    else
                    {
                        json.BeginString();
                        json.ContinueString_Pointer(pUserData);
                        json.EndString();
                    }
                }

                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    // --- Section 2: default (per-memory-type) block vectors. ---
    // "DefaultPools" object is also opened lazily.
    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // --- Section 3: custom pools, as an array in creation-sorted order. ---
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginArray();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndArray();
        }
    }
}
8573 
8574 #endif // #if VMA_STATS_STRING_ENABLED
8575 
8576 static VkResult AllocateMemoryForImage(
8577  VmaAllocator allocator,
8578  VkImage image,
8579  const VmaAllocationCreateInfo* pAllocationCreateInfo,
8580  VmaSuballocationType suballocType,
8581  VmaAllocation* pAllocation)
8582 {
8583  VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8584 
8585  VkMemoryRequirements vkMemReq = {};
8586  bool requiresDedicatedAllocation = false;
8587  bool prefersDedicatedAllocation = false;
8588  allocator->GetImageMemoryRequirements(image, vkMemReq,
8589  requiresDedicatedAllocation, prefersDedicatedAllocation);
8590 
8591  return allocator->AllocateMemory(
8592  vkMemReq,
8593  requiresDedicatedAllocation,
8594  prefersDedicatedAllocation,
8595  VK_NULL_HANDLE, // dedicatedBuffer
8596  image, // dedicatedImage
8597  *pAllocationCreateInfo,
8598  suballocType,
8599  pAllocation);
8600 }
8601 
8603 // Public interface
8604 
8605 VkResult vmaCreateAllocator(
8606  const VmaAllocatorCreateInfo* pCreateInfo,
8607  VmaAllocator* pAllocator)
8608 {
8609  VMA_ASSERT(pCreateInfo && pAllocator);
8610  VMA_DEBUG_LOG("vmaCreateAllocator");
8611  *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
8612  return VK_SUCCESS;
8613 }
8614 
8615 void vmaDestroyAllocator(
8616  VmaAllocator allocator)
8617 {
8618  if(allocator != VK_NULL_HANDLE)
8619  {
8620  VMA_DEBUG_LOG("vmaDestroyAllocator");
8621  VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8622  vma_delete(&allocationCallbacks, allocator);
8623  }
8624 }
8625 
8627  VmaAllocator allocator,
8628  const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8629 {
8630  VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8631  *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
8632 }
8633 
8635  VmaAllocator allocator,
8636  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8637 {
8638  VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8639  *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
8640 }
8641 
8643  VmaAllocator allocator,
8644  uint32_t memoryTypeIndex,
8645  VkMemoryPropertyFlags* pFlags)
8646 {
8647  VMA_ASSERT(allocator && pFlags);
8648  VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8649  *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
8650 }
8651 
8653  VmaAllocator allocator,
8654  uint32_t frameIndex)
8655 {
8656  VMA_ASSERT(allocator);
8657  VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8658 
8659  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8660 
8661  allocator->SetCurrentFrameIndex(frameIndex);
8662 }
8663 
8664 void vmaCalculateStats(
8665  VmaAllocator allocator,
8666  VmaStats* pStats)
8667 {
8668  VMA_ASSERT(allocator && pStats);
8669  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8670  allocator->CalculateStats(pStats);
8671 }
8672 
8673 #if VMA_STATS_STRING_ENABLED
8674 
// Builds a JSON statistics string describing all heaps, memory types and
// (optionally) a detailed map of every allocation. The returned string is
// heap-allocated through the allocator's CPU callbacks and must be released
// with vmaFreeStatsString.
void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        // Scope the writer so it flushes into sb before the string is copied out.
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaStats stats;
        allocator->CalculateStats(&stats);

        // Allocator-wide totals first.
        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        // One object per memory heap; each heap nests its memory types.
        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            // Per-heap stats only if the heap actually holds blocks.
            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            // Nest every memory type that maps to this heap.
            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    // Spell out the property flags as readable names.
                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    // Copy the builder's contents into a NUL-terminated heap string owned
    // by the caller (freed via vmaFreeStatsString).
    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}
8782 
8783 void vmaFreeStatsString(
8784  VmaAllocator allocator,
8785  char* pStatsString)
8786 {
8787  if(pStatsString != VMA_NULL)
8788  {
8789  VMA_ASSERT(allocator);
8790  size_t len = strlen(pStatsString);
8791  vma_delete_array(allocator, pStatsString, len + 1);
8792  }
8793 }
8794 
8795 #endif // #if VMA_STATS_STRING_ENABLED
8796 
8797 /*
8798 This function is not protected by any mutex because it just reads immutable data.
8799 */
8800 VkResult vmaFindMemoryTypeIndex(
8801  VmaAllocator allocator,
8802  uint32_t memoryTypeBits,
8803  const VmaAllocationCreateInfo* pAllocationCreateInfo,
8804  uint32_t* pMemoryTypeIndex)
8805 {
8806  VMA_ASSERT(allocator != VK_NULL_HANDLE);
8807  VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8808  VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8809 
8810  if(pAllocationCreateInfo->memoryTypeBits != 0)
8811  {
8812  memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
8813  }
8814 
8815  uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
8816  uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
8817 
8818  // Convert usage to requiredFlags and preferredFlags.
8819  switch(pAllocationCreateInfo->usage)
8820  {
8822  break;
8824  preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8825  break;
8827  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8828  break;
8830  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8831  preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8832  break;
8834  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8835  preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8836  break;
8837  default:
8838  break;
8839  }
8840 
8841  *pMemoryTypeIndex = UINT32_MAX;
8842  uint32_t minCost = UINT32_MAX;
8843  for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8844  memTypeIndex < allocator->GetMemoryTypeCount();
8845  ++memTypeIndex, memTypeBit <<= 1)
8846  {
8847  // This memory type is acceptable according to memoryTypeBits bitmask.
8848  if((memTypeBit & memoryTypeBits) != 0)
8849  {
8850  const VkMemoryPropertyFlags currFlags =
8851  allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8852  // This memory type contains requiredFlags.
8853  if((requiredFlags & ~currFlags) == 0)
8854  {
8855  // Calculate cost as number of bits from preferredFlags not present in this memory type.
8856  uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8857  // Remember memory type with lowest cost.
8858  if(currCost < minCost)
8859  {
8860  *pMemoryTypeIndex = memTypeIndex;
8861  if(currCost == 0)
8862  {
8863  return VK_SUCCESS;
8864  }
8865  minCost = currCost;
8866  }
8867  }
8868  }
8869  }
8870  return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8871 }
8872 
8874  VmaAllocator allocator,
8875  const VkBufferCreateInfo* pBufferCreateInfo,
8876  const VmaAllocationCreateInfo* pAllocationCreateInfo,
8877  uint32_t* pMemoryTypeIndex)
8878 {
8879  VMA_ASSERT(allocator != VK_NULL_HANDLE);
8880  VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
8881  VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8882  VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8883 
8884  const VkDevice hDev = allocator->m_hDevice;
8885  VkBuffer hBuffer = VK_NULL_HANDLE;
8886  VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
8887  hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
8888  if(res == VK_SUCCESS)
8889  {
8890  VkMemoryRequirements memReq = {};
8891  allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
8892  hDev, hBuffer, &memReq);
8893 
8894  res = vmaFindMemoryTypeIndex(
8895  allocator,
8896  memReq.memoryTypeBits,
8897  pAllocationCreateInfo,
8898  pMemoryTypeIndex);
8899 
8900  allocator->GetVulkanFunctions().vkDestroyBuffer(
8901  hDev, hBuffer, allocator->GetAllocationCallbacks());
8902  }
8903  return res;
8904 }
8905 
8907  VmaAllocator allocator,
8908  const VkImageCreateInfo* pImageCreateInfo,
8909  const VmaAllocationCreateInfo* pAllocationCreateInfo,
8910  uint32_t* pMemoryTypeIndex)
8911 {
8912  VMA_ASSERT(allocator != VK_NULL_HANDLE);
8913  VMA_ASSERT(pImageCreateInfo != VMA_NULL);
8914  VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8915  VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8916 
8917  const VkDevice hDev = allocator->m_hDevice;
8918  VkImage hImage = VK_NULL_HANDLE;
8919  VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
8920  hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
8921  if(res == VK_SUCCESS)
8922  {
8923  VkMemoryRequirements memReq = {};
8924  allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
8925  hDev, hImage, &memReq);
8926 
8927  res = vmaFindMemoryTypeIndex(
8928  allocator,
8929  memReq.memoryTypeBits,
8930  pAllocationCreateInfo,
8931  pMemoryTypeIndex);
8932 
8933  allocator->GetVulkanFunctions().vkDestroyImage(
8934  hDev, hImage, allocator->GetAllocationCallbacks());
8935  }
8936  return res;
8937 }
8938 
8939 VkResult vmaCreatePool(
8940  VmaAllocator allocator,
8941  const VmaPoolCreateInfo* pCreateInfo,
8942  VmaPool* pPool)
8943 {
8944  VMA_ASSERT(allocator && pCreateInfo && pPool);
8945 
8946  VMA_DEBUG_LOG("vmaCreatePool");
8947 
8948  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8949 
8950  return allocator->CreatePool(pCreateInfo, pPool);
8951 }
8952 
8953 void vmaDestroyPool(
8954  VmaAllocator allocator,
8955  VmaPool pool)
8956 {
8957  VMA_ASSERT(allocator);
8958 
8959  if(pool == VK_NULL_HANDLE)
8960  {
8961  return;
8962  }
8963 
8964  VMA_DEBUG_LOG("vmaDestroyPool");
8965 
8966  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8967 
8968  allocator->DestroyPool(pool);
8969 }
8970 
8971 void vmaGetPoolStats(
8972  VmaAllocator allocator,
8973  VmaPool pool,
8974  VmaPoolStats* pPoolStats)
8975 {
8976  VMA_ASSERT(allocator && pool && pPoolStats);
8977 
8978  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8979 
8980  allocator->GetPoolStats(pool, pPoolStats);
8981 }
8982 
8984  VmaAllocator allocator,
8985  VmaPool pool,
8986  size_t* pLostAllocationCount)
8987 {
8988  VMA_ASSERT(allocator && pool);
8989 
8990  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8991 
8992  allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8993 }
8994 
8995 VkResult vmaAllocateMemory(
8996  VmaAllocator allocator,
8997  const VkMemoryRequirements* pVkMemoryRequirements,
8998  const VmaAllocationCreateInfo* pCreateInfo,
8999  VmaAllocation* pAllocation,
9000  VmaAllocationInfo* pAllocationInfo)
9001 {
9002  VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
9003 
9004  VMA_DEBUG_LOG("vmaAllocateMemory");
9005 
9006  VMA_DEBUG_GLOBAL_MUTEX_LOCK
9007 
9008  VkResult result = allocator->AllocateMemory(
9009  *pVkMemoryRequirements,
9010  false, // requiresDedicatedAllocation
9011  false, // prefersDedicatedAllocation
9012  VK_NULL_HANDLE, // dedicatedBuffer
9013  VK_NULL_HANDLE, // dedicatedImage
9014  *pCreateInfo,
9015  VMA_SUBALLOCATION_TYPE_UNKNOWN,
9016  pAllocation);
9017 
9018  if(pAllocationInfo && result == VK_SUCCESS)
9019  {
9020  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9021  }
9022 
9023  return result;
9024 }
9025 
9027  VmaAllocator allocator,
9028  VkBuffer buffer,
9029  const VmaAllocationCreateInfo* pCreateInfo,
9030  VmaAllocation* pAllocation,
9031  VmaAllocationInfo* pAllocationInfo)
9032 {
9033  VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9034 
9035  VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
9036 
9037  VMA_DEBUG_GLOBAL_MUTEX_LOCK
9038 
9039  VkMemoryRequirements vkMemReq = {};
9040  bool requiresDedicatedAllocation = false;
9041  bool prefersDedicatedAllocation = false;
9042  allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
9043  requiresDedicatedAllocation,
9044  prefersDedicatedAllocation);
9045 
9046  VkResult result = allocator->AllocateMemory(
9047  vkMemReq,
9048  requiresDedicatedAllocation,
9049  prefersDedicatedAllocation,
9050  buffer, // dedicatedBuffer
9051  VK_NULL_HANDLE, // dedicatedImage
9052  *pCreateInfo,
9053  VMA_SUBALLOCATION_TYPE_BUFFER,
9054  pAllocation);
9055 
9056  if(pAllocationInfo && result == VK_SUCCESS)
9057  {
9058  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9059  }
9060 
9061  return result;
9062 }
9063 
9064 VkResult vmaAllocateMemoryForImage(
9065  VmaAllocator allocator,
9066  VkImage image,
9067  const VmaAllocationCreateInfo* pCreateInfo,
9068  VmaAllocation* pAllocation,
9069  VmaAllocationInfo* pAllocationInfo)
9070 {
9071  VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9072 
9073  VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
9074 
9075  VMA_DEBUG_GLOBAL_MUTEX_LOCK
9076 
9077  VkResult result = AllocateMemoryForImage(
9078  allocator,
9079  image,
9080  pCreateInfo,
9081  VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
9082  pAllocation);
9083 
9084  if(pAllocationInfo && result == VK_SUCCESS)
9085  {
9086  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9087  }
9088 
9089  return result;
9090 }
9091 
9092 void vmaFreeMemory(
9093  VmaAllocator allocator,
9094  VmaAllocation allocation)
9095 {
9096  VMA_ASSERT(allocator);
9097  VMA_DEBUG_LOG("vmaFreeMemory");
9098  VMA_DEBUG_GLOBAL_MUTEX_LOCK
9099  if(allocation != VK_NULL_HANDLE)
9100  {
9101  allocator->FreeMemory(allocation);
9102  }
9103 }
9104 
9106  VmaAllocator allocator,
9107  VmaAllocation allocation,
9108  VmaAllocationInfo* pAllocationInfo)
9109 {
9110  VMA_ASSERT(allocator && allocation && pAllocationInfo);
9111 
9112  VMA_DEBUG_GLOBAL_MUTEX_LOCK
9113 
9114  allocator->GetAllocationInfo(allocation, pAllocationInfo);
9115 }
9116 
9117 VkBool32 vmaTouchAllocation(
9118  VmaAllocator allocator,
9119  VmaAllocation allocation)
9120 {
9121  VMA_ASSERT(allocator && allocation);
9122 
9123  VMA_DEBUG_GLOBAL_MUTEX_LOCK
9124 
9125  return allocator->TouchAllocation(allocation);
9126 }
9127 
9129  VmaAllocator allocator,
9130  VmaAllocation allocation,
9131  void* pUserData)
9132 {
9133  VMA_ASSERT(allocator && allocation);
9134 
9135  VMA_DEBUG_GLOBAL_MUTEX_LOCK
9136 
9137  allocation->SetUserData(allocator, pUserData);
9138 }
9139 
9141  VmaAllocator allocator,
9142  VmaAllocation* pAllocation)
9143 {
9144  VMA_ASSERT(allocator && pAllocation);
9145 
9146  VMA_DEBUG_GLOBAL_MUTEX_LOCK;
9147 
9148  allocator->CreateLostAllocation(pAllocation);
9149 }
9150 
9151 VkResult vmaMapMemory(
9152  VmaAllocator allocator,
9153  VmaAllocation allocation,
9154  void** ppData)
9155 {
9156  VMA_ASSERT(allocator && allocation && ppData);
9157 
9158  VMA_DEBUG_GLOBAL_MUTEX_LOCK
9159 
9160  return allocator->Map(allocation, ppData);
9161 }
9162 
9163 void vmaUnmapMemory(
9164  VmaAllocator allocator,
9165  VmaAllocation allocation)
9166 {
9167  VMA_ASSERT(allocator && allocation);
9168 
9169  VMA_DEBUG_GLOBAL_MUTEX_LOCK
9170 
9171  allocator->Unmap(allocation);
9172 }
9173 
9174 VkResult vmaDefragment(
9175  VmaAllocator allocator,
9176  VmaAllocation* pAllocations,
9177  size_t allocationCount,
9178  VkBool32* pAllocationsChanged,
9179  const VmaDefragmentationInfo *pDefragmentationInfo,
9180  VmaDefragmentationStats* pDefragmentationStats)
9181 {
9182  VMA_ASSERT(allocator && pAllocations);
9183 
9184  VMA_DEBUG_LOG("vmaDefragment");
9185 
9186  VMA_DEBUG_GLOBAL_MUTEX_LOCK
9187 
9188  return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
9189 }
9190 
9191 VkResult vmaBindBufferMemory(
9192  VmaAllocator allocator,
9193  VmaAllocation allocation,
9194  VkBuffer buffer)
9195 {
9196  VMA_ASSERT(allocator && allocation && buffer);
9197 
9198  VMA_DEBUG_LOG("vmaBindBufferMemory");
9199 
9200  VMA_DEBUG_GLOBAL_MUTEX_LOCK
9201 
9202  return allocator->BindBufferMemory(allocation, buffer);
9203 }
9204 
9205 VkResult vmaBindImageMemory(
9206  VmaAllocator allocator,
9207  VmaAllocation allocation,
9208  VkImage image)
9209 {
9210  VMA_ASSERT(allocator && allocation && image);
9211 
9212  VMA_DEBUG_LOG("vmaBindImageMemory");
9213 
9214  VMA_DEBUG_GLOBAL_MUTEX_LOCK
9215 
9216  return allocator->BindImageMemory(allocation, image);
9217 }
9218 
// Creates a VkBuffer, allocates memory for it, and binds them together.
// Performs four steps: create buffer -> query memory requirements ->
// allocate -> bind. On failure at any step, everything created so far is
// destroyed in reverse order and *pBuffer / *pAllocation are left null.
// pAllocationInfo is optional; when non-null it receives info about the
// new allocation.
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    // Null out both out-parameters up front so every failure path returns
    // null handles to the caller.
    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements (also queries the dedicated-allocation
        // preference when VK_KHR_dedicated_allocation is in use).
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation  = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in alignment reported by memory requirements.
        // (Debug-build sanity checks only; VMA_ASSERT compiles out in release.)
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            pAllocation);
        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
            if(res >= 0)
            {
                // All steps succeeded.
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            // Bind failed: unwind in reverse order — free memory, destroy buffer.
            allocator->FreeMemory(*pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        // Allocation failed: destroy the buffer created in step 1.
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
9304 
9305 void vmaDestroyBuffer(
9306  VmaAllocator allocator,
9307  VkBuffer buffer,
9308  VmaAllocation allocation)
9309 {
9310  VMA_ASSERT(allocator);
9311  VMA_DEBUG_LOG("vmaDestroyBuffer");
9312  VMA_DEBUG_GLOBAL_MUTEX_LOCK
9313  if(buffer != VK_NULL_HANDLE)
9314  {
9315  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
9316  }
9317  if(allocation != VK_NULL_HANDLE)
9318  {
9319  allocator->FreeMemory(allocation);
9320  }
9321 }
9322 
// Creates a VkImage, allocates memory for it, and binds them together.
// Mirrors vmaCreateBuffer: create image -> allocate -> bind, unwinding in
// reverse order on any failure so *pImage / *pAllocation are left null.
// pAllocationInfo is optional; when non-null it receives info about the
// new allocation.
VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    // Null out both out-parameters up front so every failure path returns
    // null handles to the caller.
    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        // Optimal-tiling and linear-tiling images are tracked as different
        // suballocation types (they must not share memory pages).
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
        if(res >= 0)
        {
            // 3. Bind image with memory.
            res = allocator->BindImageMemory(*pAllocation, *pImage);
            if(res >= 0)
            {
                // All steps succeeded.
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            // Bind failed: unwind in reverse order — free memory, destroy image.
            allocator->FreeMemory(*pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        // Allocation failed: destroy the image created in step 1.
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
9379 
9380 void vmaDestroyImage(
9381  VmaAllocator allocator,
9382  VkImage image,
9383  VmaAllocation allocation)
9384 {
9385  VMA_ASSERT(allocator);
9386  VMA_DEBUG_LOG("vmaDestroyImage");
9387  VMA_DEBUG_GLOBAL_MUTEX_LOCK
9388  if(image != VK_NULL_HANDLE)
9389  {
9390  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
9391  }
9392  if(allocation != VK_NULL_HANDLE)
9393  {
9394  allocator->FreeMemory(allocation);
9395  }
9396 }
9397 
9398 #endif // #ifdef VMA_IMPLEMENTATION
PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1169
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1433
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1196
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
Represents single memory allocation.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1179
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1390
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1173
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1763
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1193
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1962
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1609
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1663
Definition: vk_mem_alloc.h:1470
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1162
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1508
Definition: vk_mem_alloc.h:1417
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1205
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1258
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:1190
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1421
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1323
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1176
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1322
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1966
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1222
VmaStatInfo total
Definition: vk_mem_alloc.h:1332
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1974
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1492
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1957
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1177
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1104
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1199
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1617
Definition: vk_mem_alloc.h:1611
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1773
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1174
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1529
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1633
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1669
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1160
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1620
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1368
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1952
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1970
Definition: vk_mem_alloc.h:1407
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1516
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1175
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1328
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1110
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1131
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1136
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1972
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1503
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1679
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1170
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1311
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1628
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1123
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1477
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1324
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1127
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1623
Definition: vk_mem_alloc.h:1416
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1498
Definition: vk_mem_alloc.h:1489
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1314
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1172
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1641
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1208
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1672
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1487
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1522
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1246
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1330
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1457
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1323
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1181
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1125
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1180
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1655
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1787
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1202
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1323
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1320
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1660
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1768
Definition: vk_mem_alloc.h:1485
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1968
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1168
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1318
Definition: vk_mem_alloc.h:1373
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1613
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1316
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1178
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1182
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1444
Definition: vk_mem_alloc.h:1400
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1782
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1158
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1171
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1749
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1591
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1324
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1331
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1666
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1324
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1754