Vulkan Memory Allocator
vk_mem_alloc.h
//
// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//

#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#ifdef __cplusplus
extern "C" {
#endif
#include <vulkan/vulkan.h>

VK_DEFINE_HANDLE(VmaAllocator)

/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);

/// Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
typedef struct VmaDeviceMemoryCallbacks {
    /// Optional, can be null.
    PFN_vmaAllocateDeviceMemoryFunction pfnAllocate;
    /// Optional, can be null.
    PFN_vmaFreeDeviceMemoryFunction pfnFree;
} VmaDeviceMemoryCallbacks;

/// Flags for created VmaAllocator.
typedef enum VmaAllocatorCreateFlagBits {
    /// Allocator and all objects created from it will not be synchronized internally,
    /// so you must guarantee they are used from only one thread at a time or are
    /// synchronized externally by you.
    VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001,
    /// Enables usage of the VK_KHR_dedicated_allocation extension.
    VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT = 0x00000002,
    VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VmaAllocatorCreateFlagBits;
typedef VkFlags VmaAllocatorCreateFlags;

/// Pointers to some Vulkan functions - a subset used by the library.
/// Used in VmaAllocatorCreateInfo::pVulkanFunctions.
typedef struct VmaVulkanFunctions {
    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    PFN_vkAllocateMemory vkAllocateMemory;
    PFN_vkFreeMemory vkFreeMemory;
    PFN_vkMapMemory vkMapMemory;
    PFN_vkUnmapMemory vkUnmapMemory;
    PFN_vkBindBufferMemory vkBindBufferMemory;
    PFN_vkBindImageMemory vkBindImageMemory;
    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    PFN_vkCreateBuffer vkCreateBuffer;
    PFN_vkDestroyBuffer vkDestroyBuffer;
    PFN_vkCreateImage vkCreateImage;
    PFN_vkDestroyImage vkDestroyImage;
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
} VmaVulkanFunctions;

/// Description of an Allocator to be created.
typedef struct VmaAllocatorCreateInfo
{
    /// Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
    VmaAllocatorCreateFlags flags;

    /// Vulkan physical device. It must be valid throughout the whole lifetime of the created allocator.
    VkPhysicalDevice physicalDevice;

    /// Vulkan device. It must be valid throughout the whole lifetime of the created allocator.
    VkDevice device;

    /// Preferred size of a single VkDeviceMemory block to be allocated from large heaps. Optional, can be 0 to use default.
    VkDeviceSize preferredLargeHeapBlockSize;

    /// Custom CPU memory allocation callbacks. Optional, can be null.
    const VkAllocationCallbacks* pAllocationCallbacks;

    /// Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional, can be null.
    const VmaDeviceMemoryCallbacks* pDeviceMemoryCallbacks;

    /// Maximum number of additional frames that are in use at the same time as the current frame.
    uint32_t frameInUseCount;

    /// Either null or a pointer to an array of limits on the maximum number of
    /// bytes that can be allocated out of each Vulkan memory heap.
    const VkDeviceSize* pHeapSizeLimit;

    /// Pointers to Vulkan functions. Can be null if VMA_STATIC_VULKAN_FUNCTIONS is left defined to 1.
    const VmaVulkanFunctions* pVulkanFunctions;
} VmaAllocatorCreateInfo;

/// Creates Allocator object.
VkResult vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator);
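
/*
Example of typical use - a brief sketch, assuming `physicalDevice` and `device`
were created earlier with the usual Vulkan calls:

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;

    VmaAllocator allocator;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
    // ... use the allocator ...
    vmaDestroyAllocator(allocator);
*/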

/// Destroys allocator object.
void vmaDestroyAllocator(
    VmaAllocator allocator);

/// PhysicalDeviceProperties are fetched from physicalDevice by the allocator.
/// You can access them here, without fetching them again on your own.
void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

/// PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator.
/// You can access them here, without fetching them again on your own.
void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

/// Given a memory type index, returns the property flags of this memory type.
void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

/// Sets index of the current frame.
void vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);

/// Calculated statistics of memory usage in entire allocator.
typedef struct VmaStatInfo
{
    /// Number of VkDeviceMemory Vulkan memory blocks allocated.
    uint32_t blockCount;
    /// Number of VmaAllocation allocation objects allocated.
    uint32_t allocationCount;
    /// Number of free ranges of memory between allocations.
    uint32_t unusedRangeCount;
    /// Total number of bytes occupied by all allocations.
    VkDeviceSize usedBytes;
    /// Total number of bytes occupied by unused ranges.
    VkDeviceSize unusedBytes;
    VkDeviceSize allocationSizeMin, allocationSizeAvg, allocationSizeMax;
    VkDeviceSize unusedRangeSizeMin, unusedRangeSizeAvg, unusedRangeSizeMax;
} VmaStatInfo;

/// General statistics from current state of the Allocator.
typedef struct VmaStats
{
    VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES];
    VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS];
    VmaStatInfo total;
} VmaStats;

/// Retrieves statistics from current state of the Allocator.
void vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats);

#define VMA_STATS_STRING_ENABLED 1

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as string in JSON format.
/// @param[out] ppStatsString Must be freed using vmaFreeStatsString() function.
void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);
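
/*
A short usage sketch: build the JSON statistics string, consume it, then free it
with the matching function (assumes `allocator` is a valid VmaAllocator):

    char* statsString = NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE);
    // Write statsString to a log or file here...
    vmaFreeStatsString(allocator, statsString);
*/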

#endif // #if VMA_STATS_STRING_ENABLED

VK_DEFINE_HANDLE(VmaPool)

typedef enum VmaMemoryUsage
{
    /// No intended memory usage specified.
    VMA_MEMORY_USAGE_UNKNOWN = 0,
    /// Memory will be used on device only, so faster access from the device is preferred.
    VMA_MEMORY_USAGE_GPU_ONLY = 1,
    /// Memory will be mapped and written on host. Guaranteed to be HOST_VISIBLE and HOST_COHERENT.
    VMA_MEMORY_USAGE_CPU_ONLY = 2,
    /// Memory will be used for frequent (dynamic) updates from host and reads on device.
    VMA_MEMORY_USAGE_CPU_TO_GPU = 3,
    /// Memory will be used for writing on device and readback on host.
    VMA_MEMORY_USAGE_GPU_TO_CPU = 4,
    VMA_MEMORY_USAGE_MAX_ENUM = 0x7FFFFFFF
} VmaMemoryUsage;

/// Flags to be passed as VmaAllocationCreateInfo::flags.
typedef enum VmaAllocationCreateFlagBits {
    VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT = 0x00000001,
    VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT = 0x00000002,
    VMA_ALLOCATION_CREATE_MAPPED_BIT = 0x00000004,
    VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT = 0x00000008,
    VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT = 0x00000010,
    VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT = 0x00000020,
    VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VmaAllocationCreateFlagBits;
typedef VkFlags VmaAllocationCreateFlags;

typedef struct VmaAllocationCreateInfo
{
    /// Use VmaAllocationCreateFlagBits enum.
    VmaAllocationCreateFlags flags;
    /// Intended usage of memory.
    VmaMemoryUsage usage;
    /// Flags that must be set in a memory type chosen for an allocation.
    VkMemoryPropertyFlags requiredFlags;
    /// Flags that preferably should be set in a memory type chosen for an allocation.
    VkMemoryPropertyFlags preferredFlags;
    /// Bitmask containing one bit set for every memory type acceptable for this allocation.
    uint32_t memoryTypeBits;
    /// Pool that this allocation should be created in. Optional.
    VmaPool pool;
    /// Custom general-purpose pointer that will be stored in VmaAllocation.
    void* pUserData;
} VmaAllocationCreateInfo;

/// Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
/// Returns VK_ERROR_FEATURE_NOT_PRESENT if no suitable memory type was found.
VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);
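
/*
A minimal sketch of finding a memory type for CPU-side staging data; the chosen
index can then be used e.g. as VmaPoolCreateInfo::memoryTypeIndex:

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex;
    VkResult res = vmaFindMemoryTypeIndex(allocator, UINT32_MAX, &allocInfo, &memTypeIndex);
    // UINT32_MAX as memoryTypeBits means any memory type is acceptable.
*/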

/// Flags to be passed as VmaPoolCreateInfo::flags.
typedef enum VmaPoolCreateFlagBits {
    /// Use this flag if you always allocate only buffers and linear images,
    /// or only optimal images, out of this pool, so buffer-image granularity
    /// can be ignored.
    VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT = 0x00000002,
    VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VmaPoolCreateFlagBits;
typedef VkFlags VmaPoolCreateFlags;

/// Describes parameters of a created VmaPool.
typedef struct VmaPoolCreateInfo {
    /// Vulkan memory type index to allocate this pool from.
    uint32_t memoryTypeIndex;
    /// Use combination of VmaPoolCreateFlagBits.
    VmaPoolCreateFlags flags;
    /// Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
    VkDeviceSize blockSize;
    /// Minimum number of blocks to be always allocated in this pool, even if they stay empty.
    size_t minBlockCount;
    /// Maximum number of blocks that can be allocated in this pool. 0 means no limit.
    size_t maxBlockCount;
    /// Maximum number of additional frames that are in use at the same time as the current frame.
    uint32_t frameInUseCount;
} VmaPoolCreateInfo;

/// Describes parameters of an existing VmaPool.
typedef struct VmaPoolStats {
    /// Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
    VkDeviceSize size;
    /// Total number of bytes in the pool not used by any VmaAllocation.
    VkDeviceSize unusedSize;
    /// Number of VmaAllocation objects created from this pool that were not destroyed or lost.
    size_t allocationCount;
    /// Number of continuous memory ranges in the pool not used by any VmaAllocation.
    size_t unusedRangeCount;
    /// Size of the largest continuous free memory region.
    VkDeviceSize unusedRangeSizeMax;
} VmaPoolStats;

/// Allocates Vulkan device memory and creates VmaPool object.
VkResult vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool);
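
/*
Example of creating a custom pool - a sketch that assumes `memTypeIndex` was
found with vmaFindMemoryTypeIndex() as shown earlier:

    VmaPoolCreateInfo poolInfo = {};
    poolInfo.memoryTypeIndex = memTypeIndex;
    poolInfo.blockSize = 128ull * 1024 * 1024; // 128 MiB blocks.
    poolInfo.maxBlockCount = 2;                // At most 256 MiB total.

    VmaPool pool;
    VkResult res = vmaCreatePool(allocator, &poolInfo, &pool);
    // Pass `pool` as VmaAllocationCreateInfo::pool when allocating, and finally:
    vmaDestroyPool(allocator, pool);
*/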

/// Destroys VmaPool object and frees Vulkan device memory.
void vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool);

/// Retrieves statistics of an existing VmaPool object.
void vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats);

/// Marks all allocations in given pool as lost if they are not used in the
/// current frame or VmaPoolCreateInfo::frameInUseCount frames back from now.
void vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

VK_DEFINE_HANDLE(VmaAllocation)

/// Parameters of a VmaAllocation object, that can be retrieved using function vmaGetAllocationInfo().
typedef struct VmaAllocationInfo {
    /// Memory type index that this allocation was allocated from.
    uint32_t memoryType;
    /// Handle to Vulkan memory object. The same memory object can be shared by multiple allocations.
    VkDeviceMemory deviceMemory;
    /// Offset into deviceMemory object to the beginning of this allocation, in bytes.
    VkDeviceSize offset;
    /// Size of this allocation, in bytes.
    VkDeviceSize size;
    /// Pointer to the beginning of this allocation as mapped data. Null if not mapped.
    void* pMappedData;
    /// Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData.
    void* pUserData;
} VmaAllocationInfo;


/// General purpose memory allocation.
/// You should free the memory using vmaFreeMemory().
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VkResult vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Function similar to vmaAllocateMemoryForBuffer().
VkResult vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Frees memory previously allocated using vmaAllocateMemory(),
/// vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
void vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation);

/// Returns current information about the specified allocation.
void vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo);

/// Sets pUserData in given allocation to new value.
void vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData);

/// Creates a new allocation that is in lost state from the beginning.
void vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation);

/** Maps memory represented by given allocation and returns pointer to it.

Maps are reference-counted internally, so it is safe to map an allocation
multiple times or to map multiple allocations placed in one memory block.
*/
VkResult vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData);

/// Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
void vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation);
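
/*
Typical map/write/unmap sequence - a sketch that assumes `allocation` lives in
a HOST_VISIBLE memory type (e.g. created with VMA_MEMORY_USAGE_CPU_ONLY):

    void* mappedData;
    if(vmaMapMemory(allocator, allocation, &mappedData) == VK_SUCCESS)
    {
        memcpy(mappedData, &myData, sizeof(myData)); // `myData` is hypothetical.
        vmaUnmapMemory(allocator, allocation);
    }
*/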

/// Optional configuration parameters to be passed to function vmaDefragment().
typedef struct VmaDefragmentationInfo {
    /// Maximum total number of bytes that can be copied while moving allocations to different places. UINT64_MAX means no limit.
    VkDeviceSize maxBytesToMove;
    /// Maximum number of allocations that can be moved to a different place. UINT32_MAX means no limit.
    uint32_t maxAllocationsToMove;
} VmaDefragmentationInfo;

/// Statistics returned by function vmaDefragment().
typedef struct VmaDefragmentationStats {
    /// Total number of bytes that have been copied while moving allocations to different places.
    VkDeviceSize bytesMoved;
    /// Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
    VkDeviceSize bytesFreed;
    /// Number of allocations that have been moved to different places.
    uint32_t allocationsMoved;
    /// Number of empty VkDeviceMemory objects that have been released to the system.
    uint32_t deviceMemoryBlocksFreed;
} VmaDefragmentationStats;

/// Compacts memory by moving allocations.
VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo *pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);
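
/*
A defragmentation sketch; `allocations` and ALLOC_COUNT are hypothetical here.
Allocations flagged in `allocationsChanged` received new memory/offset, so any
buffers or images bound to them must be recreated and rebound by the caller:

    VkBool32 allocationsChanged[ALLOC_COUNT];
    VmaDefragmentationStats defragStats;
    VkResult res = vmaDefragment(
        allocator,
        allocations, ALLOC_COUNT,
        allocationsChanged,
        NULL, // Default VmaDefragmentationInfo: no limits on bytes/allocations moved.
        &defragStats);
*/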

/// Creates a buffer, then allocates and binds memory for it.
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
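
/*
The canonical buffer-creation pattern with this library - one call creates the
buffer, allocates suitable memory, and binds them together:

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 65536;
    bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer;
    VmaAllocation allocation;
    vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, NULL);
    // ... and when no longer needed:
    vmaDestroyBuffer(allocator, buffer, allocation);
*/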

/// Destroys Vulkan buffer and frees allocated memory. This is just a
/// convenience function equivalent to calling vkDestroyBuffer() followed by
/// vmaFreeMemory().
void vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation);

/// Function similar to vmaCreateBuffer().
VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Destroys Vulkan image and frees allocated memory.
void vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation);

#ifdef __cplusplus
}
#endif

#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#ifdef __INTELLISENSE__
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdio>  // for snprintf, used when VMA_STATS_STRING_ENABLED is 1
#include <cstdlib>
#include <cstring>

/*******************************************************************************
CONFIGURATION SECTION

Define some of these macros before each #include of this header, or change them
here, if you need behavior other than the default for your environment.
*/

/*
Define this macro to 1 to make the library fetch pointers to Vulkan functions
internally, like:

    vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;

Define it to 0 if you are going to provide your own pointers to Vulkan functions
via VmaAllocatorCreateInfo::pVulkanFunctions.
*/
#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
#define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

// Define this macro to 1 to make the library use STL containers instead of its own implementation.
//#define VMA_USE_STL_CONTAINERS 1

/* Set this macro to 1 to make the library include and use STL containers:
std::pair, std::vector, std::list, std::unordered_map.

Set it to 0 or leave it undefined to make the library use its own implementation
of the containers.
*/
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

/*
The following headers are used in this CONFIGURATION section only, so feel free
to remove them if not needed.
*/
#include <cassert> // for assert
#include <algorithm> // for min, max
#include <mutex> // for std::mutex
#include <atomic> // for std::atomic

#if !defined(_WIN32)
    #include <malloc.h> // for aligned_alloc()
#endif

// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef _DEBUG
        #define VMA_ASSERT(expr) assert(expr)
    #else
        #define VMA_ASSERT(expr)
    #endif
#endif

// Assert that will be called very often, e.g. inside data structures like operator[].
// Making it non-empty can noticeably slow the program down.
#ifndef VMA_HEAVY_ASSERT
    #ifdef _DEBUG
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)
    #endif
#endif

#ifndef VMA_NULL
    // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    #define VMA_NULL nullptr
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
    /*
    #define VMA_DEBUG_LOG(format, ...) do { \
        printf(format, __VA_ARGS__); \
        printf("\n"); \
    } while(false)
    */
#endif

// These helpers are used when VMA_STATS_STRING_ENABLED is 1 to implement
// vmaBuildStatsString and vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif

#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        VmaMutex() { }
        ~VmaMutex() { }
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

/*
If providing your own implementation, you need to implement a subset of std::atomic:

- Constructor(uint32_t desired)
- uint32_t load() const
- void store(uint32_t desired)
- bool compare_exchange_weak(uint32_t& expected, uint32_t desired)
*/
#ifndef VMA_ATOMIC_UINT32
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_BEST_FIT
    /// Define to 1 to use best-fit strategy when searching for a free region
    /// (smallest suitable free suballocation), which tends to reduce
    /// fragmentation at a small CPU cost; 0 takes the first suitable one.
    #define VMA_BEST_FIT (1)
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    /// Define to 1 to make every allocation use its own, dedicated VkDeviceMemory block (for debugging).
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    /// Minimum alignment of all suballocations, in bytes. Set to a power of two greater than 1 for debugging purposes.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    /// Minimum margin between suballocations, in bytes. Set nonzero for debugging purposes.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    /// Define to 1 to enable a single mutex protecting all entry calls to the library (for debugging).
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    /// Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity. Set to a larger value for debugging purposes.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to be considered "small": 1 GiB.
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as a single VkDeviceMemory from a "large" heap: 256 MiB.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

/*******************************************************************************
END OF CONFIGURATION
*/

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };

// Returns the number of bits set to 1 in (v), using parallel bit counting.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}

// Aligns given value up to the nearest multiple of align. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Division with mathematical rounding to nearest integer.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}

#ifndef VMA_SORT

// Lomuto-style partition used by VmaQuickSort below: elements less than the
// pivot (the last element) end up before the returned iterator.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT

/*
Returns true if two memory blocks occupy overlapping pages.
ResourceA must be at a lower memory offset than ResourceB.

Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
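
/*
Worked example, assuming pageSize (bufferImageGranularity) = 4096:
- A at offset 0, size 4000: last byte 3999 -> page 0.
- B at offset 4096: page 1 -> returns false (different pages, no conflict possible).
- B at offset 4000 instead: page 0 -> returns true (same page).
*/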

enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};

/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is a buffer
or linear image and the other one is an optimal image. If a type is unknown,
behaves conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}

// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    {
        if(m_pMutex)
        {
            m_pMutex->Lock();
        }
    }

    ~VmaMutexLock()
    {
        if(m_pMutex)
        {
            m_pMutex->Unlock();
        }
    }

private:
    VMA_MUTEX* m_pMutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the free suballocation collection.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;

/*
Performs binary search and returns the iterator to the first element that is
greater than or equal to (key), according to comparison (cmp).

Cmp should return true if the first argument is less than the second argument.

The returned value is the found element, if present in the collection, or the
place where a new element with value (key) should be inserted.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
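
// Usage sketch (a hypothetical illustration): in a sorted array { 0, 256, 1024 },
// searching for 512 with cmp = operator< returns an iterator to 1024, the first
// element not less than 512 - which is also where 512 would be inserted.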

////////////////////////////////////////////////////////////////////////////////
// Memory allocation

static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}

template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}

// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
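
// This allocator routes element storage of the library's containers (or of STL
// containers when VMA_USE_STL_* is enabled) through the user-provided
// VkAllocationCallbacks, e.g.:
//
//     VmaStlAllocator<int> alloc(pCallbacks);
//     VmaVector< int, VmaStlAllocator<int> > v(alloc);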

#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with a subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used to move these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            // Pass the VkAllocationCallbacks, not the allocator object itself.
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};

template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR

template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}

template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* it = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        comparator);
    // Compare against the end pointer, then check for equivalence under the comparator.
    if((it != vector.data() + vector.size()) && !comparator(*it, value) && !comparator(value, *it))
    {
        return it - vector.data();
    }
    else
    {
        return vector.size();
    }
}

////////////////////////////////////////////////////////////////////////////////
// class VmaPoolAllocator

/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. The number of elements that can be allocated is not bounded because
the allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    ~VmaPoolAllocator();
    void Clear();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    size_t m_ItemsPerBlock;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}

template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}

template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: use the first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            return &pItem->Value;
        }
    }

    // No block has a free item: create a new block and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in the address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Set up singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
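
/*
Usage sketch for this pool allocator - small objects are carved out of fixed
size blocks (e.g. 128 items each), avoiding per-object heap traffic; note that
Alloc() returns raw storage and does not run T's constructor:

    VmaPoolAllocator<MyItem> poolAlloc(pAllocationCallbacks, 128); // MyItem is hypothetical.
    MyItem* item = poolAlloc.Alloc();
    // ... use *item ...
    poolAlloc.Free(item); // Returns the slot to its block's free list.
*/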

////////////////////////////////////////////////////////////////////////////////
// class VmaRawList, VmaList

#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;

    // Declared but not defined, to block copy constructor and assignment operator.
    VmaRawList(const VmaRawList<T>& src);
    VmaRawList<T>& operator=(const VmaRawList<T>& rhs);
};

template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear: it would be wasted work to return all
    // items to m_ItemAllocator as free when the item allocator itself is about
    // to be destroyed and will release its blocks anyway.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T, typename AllocatorT>
class VmaList
{
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST

////////////////////////////////////////////////////////////////////////////////
// class VmaMap

// Unused in this version.
#if 0

#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with a subset of the interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP

#endif // #if 0

////////////////////////////////////////////////////////////////////////////////

class VmaDeviceMemoryBlock;

struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment(1),
        m_Size(0),
        m_pUserData(VMA_NULL),
        m_LastUseFrameIndex(currentFrameIndex),
        m_Type((uint8_t)ALLOCATION_TYPE_NONE),
        m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
        m_MapCount(0),
        m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
    {
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    void InitBlockAllocation(
        VmaPool hPool,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_hPool = hPool;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    // pMappedData not null means the allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const;
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;
    VmaPool GetPool() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.

    If hAllocation is already lost, asserts - you should not call it then.
    If hAllocation was not created with CAN_BECOME_LOST_BIT, asserts.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when the allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are the reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaPool m_hPool; // Null if belongs to general memory.
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        uint32_t m_MemoryTypeIndex;
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        // Allocation out of VmaDeviceMemoryBlock.
        BlockAllocation m_BlockAllocation;
        // Allocation for an object that has its own private VkDeviceMemory.
        DedicatedAllocation m_DedicatedAllocation;
    };

    void FreeUserDataString(VmaAllocator hAllocator);
};
3401 
3402 /*
3403 Represents a region of a VmaDeviceMemoryBlock that is either assigned to an
3404 allocation and returned as allocated memory, or free.
3405 */
3406 struct VmaSuballocation
3407 {
3408  VkDeviceSize offset;
3409  VkDeviceSize size;
3410  VmaAllocation hAllocation;
3411  VmaSuballocationType type;
3412 };
3413 
3414 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
3415 
3416 // Cost of making one additional allocation lost, expressed as an equivalent number of bytes.
3417 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3418 
3419 /*
3420 Parameters of planned allocation inside a VmaDeviceMemoryBlock.
3421 
3422 If canMakeOtherLost was false:
3423 - item points to a FREE suballocation.
3424 - itemsToMakeLostCount is 0.
3425 
3426 If canMakeOtherLost was true:
3427 - item points to first of sequence of suballocations, which are either FREE,
3428  or point to VmaAllocations that can become lost.
3429 - itemsToMakeLostCount is the number of VmaAllocations that need to be made lost for
3430  the requested allocation to succeed.
3431 */
3432 struct VmaAllocationRequest
3433 {
3434  VkDeviceSize offset;
3435  VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
3436  VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
3437  VmaSuballocationList::iterator item;
3438  size_t itemsToMakeLostCount;
3439 
3440  VkDeviceSize CalcCost() const
3441  {
3442  return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3443  }
3444 };
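
// Illustrative sketch, not part of the library: CalcCost() expresses how
// destructive a candidate request is, in bytes. A request placed in pure free
// space costs 0; one that evicts live allocations pays their total size plus
// VMA_LOST_ALLOCATION_COST per allocation made lost, so the search in
// CreateAllocationRequest() below always keeps the cheaper candidate.
static inline bool VmaExamplePreferCheaperRequest(
 const VmaAllocationRequest& lhs,
 const VmaAllocationRequest& rhs)
{
 // E.g. lhs = {sumItemSize: 0, itemsToMakeLostCount: 0} costs 0, while
 // rhs = {sumItemSize: 262144, itemsToMakeLostCount: 1} costs
 // 262144 + 1 * 1048576 == 1310720, so lhs wins.
 return lhs.CalcCost() < rhs.CalcCost();
}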
3445 
3446 /*
3447 Data structure used for bookkeeping of allocations and unused ranges of memory
3448 in a single VkDeviceMemory block.
3449 */
3450 class VmaBlockMetadata
3451 {
3452 public:
3453  VmaBlockMetadata(VmaAllocator hAllocator);
3454  ~VmaBlockMetadata();
3455  void Init(VkDeviceSize size);
3456 
3457  // Validates all data structures inside this object. If not valid, returns false.
3458  bool Validate() const;
3459  VkDeviceSize GetSize() const { return m_Size; }
3460  size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
3461  VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
3462  VkDeviceSize GetUnusedRangeSizeMax() const;
3463  // Returns true if this block is empty - contains only a single free suballocation.
3464  bool IsEmpty() const;
3465 
3466  void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
3467  void AddPoolStats(VmaPoolStats& inoutStats) const;
3468 
3469 #if VMA_STATS_STRING_ENABLED
3470  void PrintDetailedMap(class VmaJsonWriter& json) const;
3471 #endif
3472 
3473  // Creates a trivial request for the case when the block is empty.
3474  void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
3475 
3476  // Tries to find a place for suballocation with given parameters inside this block.
3477  // If succeeded, fills pAllocationRequest and returns true.
3478  // If failed, returns false.
3479  bool CreateAllocationRequest(
3480  uint32_t currentFrameIndex,
3481  uint32_t frameInUseCount,
3482  VkDeviceSize bufferImageGranularity,
3483  VkDeviceSize allocSize,
3484  VkDeviceSize allocAlignment,
3485  VmaSuballocationType allocType,
3486  bool canMakeOtherLost,
3487  VmaAllocationRequest* pAllocationRequest);
3488 
3489  bool MakeRequestedAllocationsLost(
3490  uint32_t currentFrameIndex,
3491  uint32_t frameInUseCount,
3492  VmaAllocationRequest* pAllocationRequest);
3493 
3494  uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3495 
3496  // Makes actual allocation based on request. Request must already be checked and valid.
3497  void Alloc(
3498  const VmaAllocationRequest& request,
3499  VmaSuballocationType type,
3500  VkDeviceSize allocSize,
3501  VmaAllocation hAllocation);
3502 
3503  // Frees suballocation assigned to given memory region.
3504  void Free(const VmaAllocation allocation);
3505  void FreeAtOffset(VkDeviceSize offset);
3506 
3507 private:
3508  VkDeviceSize m_Size;
3509  uint32_t m_FreeCount;
3510  VkDeviceSize m_SumFreeSize;
3511  VmaSuballocationList m_Suballocations;
3512  // Suballocations that are free and have size greater than a certain threshold.
3513  // Sorted by size, ascending.
3514  VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3515 
3516  bool ValidateFreeSuballocationList() const;
3517 
3518  // Checks if the requested suballocation with given parameters can be placed in given suballocItem.
3519  // If yes, fills pOffset and returns true. If no, returns false.
3520  bool CheckAllocation(
3521  uint32_t currentFrameIndex,
3522  uint32_t frameInUseCount,
3523  VkDeviceSize bufferImageGranularity,
3524  VkDeviceSize allocSize,
3525  VkDeviceSize allocAlignment,
3526  VmaSuballocationType allocType,
3527  VmaSuballocationList::const_iterator suballocItem,
3528  bool canMakeOtherLost,
3529  VkDeviceSize* pOffset,
3530  size_t* itemsToMakeLostCount,
3531  VkDeviceSize* pSumFreeSize,
3532  VkDeviceSize* pSumItemSize) const;
3533  // Given a free suballocation, merges it with the following one, which must also be free.
3534  void MergeFreeWithNext(VmaSuballocationList::iterator item);
3535  // Releases given suballocation, making it free.
3536  // Merges it with adjacent free suballocations if applicable.
3537  // Returns iterator to new free suballocation at this place.
3538  VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3539  // Given a free suballocation, inserts it into the sorted list
3540  // m_FreeSuballocationsBySize if it is large enough to be registered.
3541  void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3542  // Given a free suballocation, removes it from the sorted list
3543  // m_FreeSuballocationsBySize if it was registered there.
3544  void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3545 };
3546 
3547 // Helper class that represents mapped memory. Synchronized internally.
3548 class VmaDeviceMemoryMapping
3549 {
3550 public:
3551  VmaDeviceMemoryMapping();
3552  ~VmaDeviceMemoryMapping();
3553 
3554  void* GetMappedData() const { return m_pMappedData; }
3555 
3556  // ppData can be null.
3557  VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count, void **ppData);
3558  void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count);
3559 
3560 private:
3561  VMA_MUTEX m_Mutex;
3562  uint32_t m_MapCount;
3563  void* m_pMappedData;
3564 };
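
// Illustrative sketch, not part of the library, assuming the reference-counting
// contract implied by the interface above: Map() increments an internal counter
// and calls vkMapMemory only on the 0 -> 1 transition; Unmap() decrements it
// and calls vkUnmapMemory only when the counter returns to 0, so nested
// mappings of the same block are cheap.
static inline VkResult VmaExampleUseMapping(
 VmaAllocator hAllocator,
 VkDeviceMemory hMemory,
 VmaDeviceMemoryMapping& mapping)
{
 void* pData = VMA_NULL;
 const VkResult res = mapping.Map(hAllocator, hMemory, 1, &pData);
 if(res == VK_SUCCESS)
 {
 // ... read or write through pData ...
 mapping.Unmap(hAllocator, hMemory, 1);
 }
 return res;
}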
3565 
3566 /*
3567 Represents a single block of device memory (`VkDeviceMemory`) with all the
3568 data about its regions (aka suballocations, `VmaAllocation`), assigned and free.
3569 
3570 Thread-safety: This class must be externally synchronized.
3571 */
3572 class VmaDeviceMemoryBlock
3573 {
3574 public:
3575  uint32_t m_MemoryTypeIndex;
3576  VkDeviceMemory m_hMemory;
3577  VmaDeviceMemoryMapping m_Mapping;
3578  VmaBlockMetadata m_Metadata;
3579 
3580  VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3581 
3582  ~VmaDeviceMemoryBlock()
3583  {
3584  VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3585  }
3586 
3587  // Always call after construction.
3588  void Init(
3589  uint32_t newMemoryTypeIndex,
3590  VkDeviceMemory newMemory,
3591  VkDeviceSize newSize);
3592  // Always call before destruction.
3593  void Destroy(VmaAllocator allocator);
3594 
3595  // Validates all data structures inside this object. If not valid, returns false.
3596  bool Validate() const;
3597 
3598  // ppData can be null.
3599  VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
3600  void Unmap(VmaAllocator hAllocator, uint32_t count);
3601 };
3602 
3603 struct VmaPointerLess
3604 {
3605  bool operator()(const void* lhs, const void* rhs) const
3606  {
3607  return lhs < rhs;
3608  }
3609 };
3610 
3611 class VmaDefragmentator;
3612 
3613 /*
3614 Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
3615 Vulkan memory type.
3616 
3617 Synchronized internally with a mutex.
3618 */
3619 struct VmaBlockVector
3620 {
3621  VmaBlockVector(
3622  VmaAllocator hAllocator,
3623  uint32_t memoryTypeIndex,
3624  VkDeviceSize preferredBlockSize,
3625  size_t minBlockCount,
3626  size_t maxBlockCount,
3627  VkDeviceSize bufferImageGranularity,
3628  uint32_t frameInUseCount,
3629  bool isCustomPool);
3630  ~VmaBlockVector();
3631 
3632  VkResult CreateMinBlocks();
3633 
3634  uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
3635  VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
3636  VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
3637  uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
3638 
3639  void GetPoolStats(VmaPoolStats* pStats);
3640 
3641  bool IsEmpty() const { return m_Blocks.empty(); }
3642 
3643  VkResult Allocate(
3644  VmaPool hCurrentPool,
3645  uint32_t currentFrameIndex,
3646  const VkMemoryRequirements& vkMemReq,
3647  const VmaAllocationCreateInfo& createInfo,
3648  VmaSuballocationType suballocType,
3649  VmaAllocation* pAllocation);
3650 
3651  void Free(
3652  VmaAllocation hAllocation);
3653 
3654  // Adds statistics of this BlockVector to pStats.
3655  void AddStats(VmaStats* pStats);
3656 
3657 #if VMA_STATS_STRING_ENABLED
3658  void PrintDetailedMap(class VmaJsonWriter& json);
3659 #endif
3660 
3661  void MakePoolAllocationsLost(
3662  uint32_t currentFrameIndex,
3663  size_t* pLostAllocationCount);
3664 
3665  VmaDefragmentator* EnsureDefragmentator(
3666  VmaAllocator hAllocator,
3667  uint32_t currentFrameIndex);
3668 
3669  VkResult Defragment(
3670  VmaDefragmentationStats* pDefragmentationStats,
3671  VkDeviceSize& maxBytesToMove,
3672  uint32_t& maxAllocationsToMove);
3673 
3674  void DestroyDefragmentator();
3675 
3676 private:
3677  friend class VmaDefragmentator;
3678 
3679  const VmaAllocator m_hAllocator;
3680  const uint32_t m_MemoryTypeIndex;
3681  const VkDeviceSize m_PreferredBlockSize;
3682  const size_t m_MinBlockCount;
3683  const size_t m_MaxBlockCount;
3684  const VkDeviceSize m_BufferImageGranularity;
3685  const uint32_t m_FrameInUseCount;
3686  const bool m_IsCustomPool;
3687  VMA_MUTEX m_Mutex;
3688  // Incrementally sorted by sumFreeSize, ascending.
3689  VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3690  /* There can be at most one memory block that is completely empty - a
3691  hysteresis to avoid the pessimistic case of alternating creation and
3692  destruction of a VkDeviceMemory. */
3693  bool m_HasEmptyBlock;
3694  VmaDefragmentator* m_pDefragmentator;
3695 
3696  size_t CalcMaxBlockSize() const;
3697 
3698  // Finds and removes given block from vector.
3699  void Remove(VmaDeviceMemoryBlock* pBlock);
3700 
3701  // Performs a single step in sorting m_Blocks. They may not be fully sorted
3702  // after this call.
3703  void IncrementallySortBlocks();
3704 
3705  VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
3706 };
3707 
3708 struct VmaPool_T
3709 {
3710 public:
3711  VmaBlockVector m_BlockVector;
3712 
3713  // Takes ownership.
3714  VmaPool_T(
3715  VmaAllocator hAllocator,
3716  const VmaPoolCreateInfo& createInfo);
3717  ~VmaPool_T();
3718 
3719  VmaBlockVector& GetBlockVector() { return m_BlockVector; }
3720 
3721 #if VMA_STATS_STRING_ENABLED
3722  //void PrintDetailedMap(class VmaStringBuilder& sb);
3723 #endif
3724 };
3725 
3726 class VmaDefragmentator
3727 {
3728  const VmaAllocator m_hAllocator;
3729  VmaBlockVector* const m_pBlockVector;
3730  uint32_t m_CurrentFrameIndex;
3731  VkDeviceSize m_BytesMoved;
3732  uint32_t m_AllocationsMoved;
3733 
3734  struct AllocationInfo
3735  {
3736  VmaAllocation m_hAllocation;
3737  VkBool32* m_pChanged;
3738 
3739  AllocationInfo() :
3740  m_hAllocation(VK_NULL_HANDLE),
3741  m_pChanged(VMA_NULL)
3742  {
3743  }
3744  };
3745 
3746  struct AllocationInfoSizeGreater
3747  {
3748  bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
3749  {
3750  return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3751  }
3752  };
3753 
3754  // Used between AddAllocation and Defragment.
3755  VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3756 
3757  struct BlockInfo
3758  {
3759  VmaDeviceMemoryBlock* m_pBlock;
3760  bool m_HasNonMovableAllocations;
3761  VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3762 
3763  BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
3764  m_pBlock(VMA_NULL),
3765  m_HasNonMovableAllocations(true),
3766  m_Allocations(pAllocationCallbacks),
3767  m_pMappedDataForDefragmentation(VMA_NULL)
3768  {
3769  }
3770 
3771  void CalcHasNonMovableAllocations()
3772  {
3773  const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3774  const size_t defragmentAllocCount = m_Allocations.size();
3775  m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3776  }
3777 
3778  void SortAllocationsBySizeDescecnding()
3779  {
3780  VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3781  }
3782 
3783  VkResult EnsureMapping(VmaAllocator hAllocator, void** ppMappedData);
3784  void Unmap(VmaAllocator hAllocator);
3785 
3786  private:
3787  // Not null if mapped for defragmentation only, not originally mapped.
3788  void* m_pMappedDataForDefragmentation;
3789  };
3790 
3791  struct BlockPointerLess
3792  {
3793  bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
3794  {
3795  return pLhsBlockInfo->m_pBlock < pRhsBlock;
3796  }
3797  bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
3798  {
3799  return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
3800  }
3801  };
3802 
3803  // 1. Blocks with some non-movable allocations go first.
3804  // 2. Blocks with smaller sumFreeSize go first.
3805  struct BlockInfoCompareMoveDestination
3806  {
3807  bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
3808  {
3809  if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3810  {
3811  return true;
3812  }
3813  if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3814  {
3815  return false;
3816  }
3817  if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3818  {
3819  return true;
3820  }
3821  return false;
3822  }
3823  };
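
 // Editorial note on this ordering: sorting candidate destinations this way
 // puts pinned, nearly-full blocks first, so movable allocations are packed
 // into blocks that cannot become empty anyway, leaving the emptiest blocks
 // free to be drained and released.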
3824 
3825  typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3826  BlockInfoVector m_Blocks;
3827 
3828  VkResult DefragmentRound(
3829  VkDeviceSize maxBytesToMove,
3830  uint32_t maxAllocationsToMove);
3831 
3832  static bool MoveMakesSense(
3833  size_t dstBlockIndex, VkDeviceSize dstOffset,
3834  size_t srcBlockIndex, VkDeviceSize srcOffset);
3835 
3836 public:
3837  VmaDefragmentator(
3838  VmaAllocator hAllocator,
3839  VmaBlockVector* pBlockVector,
3840  uint32_t currentFrameIndex);
3841 
3842  ~VmaDefragmentator();
3843 
3844  VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
3845  uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
3846 
3847  void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3848 
3849  VkResult Defragment(
3850  VkDeviceSize maxBytesToMove,
3851  uint32_t maxAllocationsToMove);
3852 };
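
// Illustrative sketch, not part of the library, of the intended call sequence:
// register the allocations to consider, then run Defragment() with byte and
// allocation budgets. VK_WHOLE_SIZE and UINT32_MAX effectively mean "no limit".
static inline VkResult VmaExampleRunDefragmentator(
 VmaDefragmentator* pDefragmentator,
 VmaAllocation hAlloc,
 VkBool32* pChanged)
{
 pDefragmentator->AddAllocation(hAlloc, pChanged);
 return pDefragmentator->Defragment(VK_WHOLE_SIZE, UINT32_MAX);
}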
3853 
3854 // Main allocator object.
3855 struct VmaAllocator_T
3856 {
3857  bool m_UseMutex;
3858  bool m_UseKhrDedicatedAllocation;
3859  VkDevice m_hDevice;
3860  bool m_AllocationCallbacksSpecified;
3861  VkAllocationCallbacks m_AllocationCallbacks;
3862  VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
3863 
3864  // Number of bytes free out of limit, or VK_WHOLE_SIZE if there is no limit for that heap.
3865  VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3866  VMA_MUTEX m_HeapSizeLimitMutex;
3867 
3868  VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3869  VkPhysicalDeviceMemoryProperties m_MemProps;
3870 
3871  // Default pools.
3872  VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
3873 
3874  // Each vector is sorted by memory (handle value).
3875  typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3876  AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
3877  VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
3878 
3879  VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
3880  ~VmaAllocator_T();
3881 
3882  const VkAllocationCallbacks* GetAllocationCallbacks() const
3883  {
3884  return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3885  }
3886  const VmaVulkanFunctions& GetVulkanFunctions() const
3887  {
3888  return m_VulkanFunctions;
3889  }
3890 
3891  VkDeviceSize GetBufferImageGranularity() const
3892  {
3893  return VMA_MAX(
3894  static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3895  m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3896  }
3897 
3898  uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
3899  uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }
3900 
3901  uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
3902  {
3903  VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3904  return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3905  }
3906 
3907  void GetBufferMemoryRequirements(
3908  VkBuffer hBuffer,
3909  VkMemoryRequirements& memReq,
3910  bool& requiresDedicatedAllocation,
3911  bool& prefersDedicatedAllocation) const;
3912  void GetImageMemoryRequirements(
3913  VkImage hImage,
3914  VkMemoryRequirements& memReq,
3915  bool& requiresDedicatedAllocation,
3916  bool& prefersDedicatedAllocation) const;
3917 
3918  // Main allocation function.
3919  VkResult AllocateMemory(
3920  const VkMemoryRequirements& vkMemReq,
3921  bool requiresDedicatedAllocation,
3922  bool prefersDedicatedAllocation,
3923  VkBuffer dedicatedBuffer,
3924  VkImage dedicatedImage,
3925  const VmaAllocationCreateInfo& createInfo,
3926  VmaSuballocationType suballocType,
3927  VmaAllocation* pAllocation);
3928 
3929  // Main deallocation function.
3930  void FreeMemory(const VmaAllocation allocation);
3931 
3932  void CalculateStats(VmaStats* pStats);
3933 
3934 #if VMA_STATS_STRING_ENABLED
3935  void PrintDetailedMap(class VmaJsonWriter& json);
3936 #endif
3937 
3938  VkResult Defragment(
3939  VmaAllocation* pAllocations,
3940  size_t allocationCount,
3941  VkBool32* pAllocationsChanged,
3942  const VmaDefragmentationInfo* pDefragmentationInfo,
3943  VmaDefragmentationStats* pDefragmentationStats);
3944 
3945  void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
3946 
3947  VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
3948  void DestroyPool(VmaPool pool);
3949  void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);
3950 
3951  void SetCurrentFrameIndex(uint32_t frameIndex);
3952 
3953  void MakePoolAllocationsLost(
3954  VmaPool hPool,
3955  size_t* pLostAllocationCount);
3956 
3957  void CreateLostAllocation(VmaAllocation* pAllocation);
3958 
3959  VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3960  void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3961 
3962  VkResult Map(VmaAllocation hAllocation, void** ppData);
3963  void Unmap(VmaAllocation hAllocation);
3964 
3965 private:
3966  VkDeviceSize m_PreferredLargeHeapBlockSize;
3967 
3968  VkPhysicalDevice m_PhysicalDevice;
3969  VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3970 
3971  VMA_MUTEX m_PoolsMutex;
3972  // Protected by m_PoolsMutex. Sorted by pointer value.
3973  VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3974 
3975  VmaVulkanFunctions m_VulkanFunctions;
3976 
3977  void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);
3978 
3979  VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
3980 
3981  VkResult AllocateMemoryOfType(
3982  const VkMemoryRequirements& vkMemReq,
3983  bool dedicatedAllocation,
3984  VkBuffer dedicatedBuffer,
3985  VkImage dedicatedImage,
3986  const VmaAllocationCreateInfo& createInfo,
3987  uint32_t memTypeIndex,
3988  VmaSuballocationType suballocType,
3989  VmaAllocation* pAllocation);
3990 
3991  // Allocates and registers new VkDeviceMemory specifically for a single allocation.
3992  VkResult AllocateDedicatedMemory(
3993  VkDeviceSize size,
3994  VmaSuballocationType suballocType,
3995  uint32_t memTypeIndex,
3996  bool map,
3997  bool isUserDataString,
3998  void* pUserData,
3999  VkBuffer dedicatedBuffer,
4000  VkImage dedicatedImage,
4001  VmaAllocation* pAllocation);
4002 
4003  // Frees the given allocation as dedicated memory: unregisters it and releases its VkDeviceMemory.
4004  void FreeDedicatedMemory(VmaAllocation allocation);
4005 };
4006 
4008 // Memory allocation #2 after VmaAllocator_T definition
4009 
4010 static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
4011 {
4012  return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4013 }
4014 
4015 static void VmaFree(VmaAllocator hAllocator, void* ptr)
4016 {
4017  VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
4018 }
4019 
4020 template<typename T>
4021 static T* VmaAllocate(VmaAllocator hAllocator)
4022 {
4023  return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
4024 }
4025 
4026 template<typename T>
4027 static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
4028 {
4029  return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
4030 }
4031 
4032 template<typename T>
4033 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4034 {
4035  if(ptr != VMA_NULL)
4036  {
4037  ptr->~T();
4038  VmaFree(hAllocator, ptr);
4039  }
4040 }
4041 
4042 template<typename T>
4043 static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
4044 {
4045  if(ptr != VMA_NULL)
4046  {
4047  for(size_t i = count; i--; )
4048  ptr[i].~T();
4049  VmaFree(hAllocator, ptr);
4050  }
4051 }
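
// Illustrative sketch, not part of the library: these helpers pair like
// new/delete but route every byte through the allocator's
// VkAllocationCallbacks. VmaAllocate/VmaAllocateArray return raw,
// uninitialized storage, so this example uses a trivially constructible type.
static inline void VmaExampleScratchBuffer(VmaAllocator hAllocator)
{
 // Allocate storage for 16 floats, honoring the type's alignment...
 float* const pScratch = VmaAllocateArray<float>(hAllocator, 16);
 for(size_t i = 0; i < 16; ++i)
 {
 pScratch[i] = 0.0f;
 }
 // ...and release it through the matching deleter (which also runs destructors).
 vma_delete_array(hAllocator, pScratch, 16);
}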
4052 
4054 // VmaStringBuilder
4055 
4056 #if VMA_STATS_STRING_ENABLED
4057 
4058 class VmaStringBuilder
4059 {
4060 public:
4061  VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4062  size_t GetLength() const { return m_Data.size(); }
4063  const char* GetData() const { return m_Data.data(); }
4064 
4065  void Add(char ch) { m_Data.push_back(ch); }
4066  void Add(const char* pStr);
4067  void AddNewLine() { Add('\n'); }
4068  void AddNumber(uint32_t num);
4069  void AddNumber(uint64_t num);
4070  void AddPointer(const void* ptr);
4071 
4072 private:
4073  VmaVector< char, VmaStlAllocator<char> > m_Data;
4074 };
4075 
4076 void VmaStringBuilder::Add(const char* pStr)
4077 {
4078  const size_t strLen = strlen(pStr);
4079  if(strLen > 0)
4080  {
4081  const size_t oldCount = m_Data.size();
4082  m_Data.resize(oldCount + strLen);
4083  memcpy(m_Data.data() + oldCount, pStr, strLen);
4084  }
4085 }
4086 
4087 void VmaStringBuilder::AddNumber(uint32_t num)
4088 {
4089  char buf[11];
4090  VmaUint32ToStr(buf, sizeof(buf), num);
4091  Add(buf);
4092 }
4093 
4094 void VmaStringBuilder::AddNumber(uint64_t num)
4095 {
4096  char buf[21];
4097  VmaUint64ToStr(buf, sizeof(buf), num);
4098  Add(buf);
4099 }
4100 
4101 void VmaStringBuilder::AddPointer(const void* ptr)
4102 {
4103  char buf[21];
4104  VmaPtrToStr(buf, sizeof(buf), ptr);
4105  Add(buf);
4106 }
4107 
4108 #endif // #if VMA_STATS_STRING_ENABLED
4109 
4111 // VmaJsonWriter
4112 
4113 #if VMA_STATS_STRING_ENABLED
4114 
4115 class VmaJsonWriter
4116 {
4117 public:
4118  VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4119  ~VmaJsonWriter();
4120 
4121  void BeginObject(bool singleLine = false);
4122  void EndObject();
4123 
4124  void BeginArray(bool singleLine = false);
4125  void EndArray();
4126 
4127  void WriteString(const char* pStr);
4128  void BeginString(const char* pStr = VMA_NULL);
4129  void ContinueString(const char* pStr);
4130  void ContinueString(uint32_t n);
4131  void ContinueString(uint64_t n);
4132  void ContinueString_Pointer(const void* ptr);
4133  void EndString(const char* pStr = VMA_NULL);
4134 
4135  void WriteNumber(uint32_t n);
4136  void WriteNumber(uint64_t n);
4137  void WriteBool(bool b);
4138  void WriteNull();
4139 
4140 private:
4141  static const char* const INDENT;
4142 
4143  enum COLLECTION_TYPE
4144  {
4145  COLLECTION_TYPE_OBJECT,
4146  COLLECTION_TYPE_ARRAY,
4147  };
4148  struct StackItem
4149  {
4150  COLLECTION_TYPE type;
4151  uint32_t valueCount;
4152  bool singleLineMode;
4153  };
4154 
4155  VmaStringBuilder& m_SB;
4156  VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4157  bool m_InsideString;
4158 
4159  void BeginValue(bool isString);
4160  void WriteIndent(bool oneLess = false);
4161 };
4162 
4163 const char* const VmaJsonWriter::INDENT = " ";
4164 
4165 VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4166  m_SB(sb),
4167  m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4168  m_InsideString(false)
4169 {
4170 }
4171 
4172 VmaJsonWriter::~VmaJsonWriter()
4173 {
4174  VMA_ASSERT(!m_InsideString);
4175  VMA_ASSERT(m_Stack.empty());
4176 }
4177 
4178 void VmaJsonWriter::BeginObject(bool singleLine)
4179 {
4180  VMA_ASSERT(!m_InsideString);
4181 
4182  BeginValue(false);
4183  m_SB.Add('{');
4184 
4185  StackItem item;
4186  item.type = COLLECTION_TYPE_OBJECT;
4187  item.valueCount = 0;
4188  item.singleLineMode = singleLine;
4189  m_Stack.push_back(item);
4190 }
4191 
4192 void VmaJsonWriter::EndObject()
4193 {
4194  VMA_ASSERT(!m_InsideString);
4195 
4196  WriteIndent(true);
4197  m_SB.Add('}');
4198 
4199  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4200  m_Stack.pop_back();
4201 }
4202 
4203 void VmaJsonWriter::BeginArray(bool singleLine)
4204 {
4205  VMA_ASSERT(!m_InsideString);
4206 
4207  BeginValue(false);
4208  m_SB.Add('[');
4209 
4210  StackItem item;
4211  item.type = COLLECTION_TYPE_ARRAY;
4212  item.valueCount = 0;
4213  item.singleLineMode = singleLine;
4214  m_Stack.push_back(item);
4215 }
4216 
4217 void VmaJsonWriter::EndArray()
4218 {
4219  VMA_ASSERT(!m_InsideString);
4220 
4221  WriteIndent(true);
4222  m_SB.Add(']');
4223 
4224  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4225  m_Stack.pop_back();
4226 }
4227 
4228 void VmaJsonWriter::WriteString(const char* pStr)
4229 {
4230  BeginString(pStr);
4231  EndString();
4232 }
4233 
4234 void VmaJsonWriter::BeginString(const char* pStr)
4235 {
4236  VMA_ASSERT(!m_InsideString);
4237 
4238  BeginValue(true);
4239  m_SB.Add('"');
4240  m_InsideString = true;
4241  if(pStr != VMA_NULL && pStr[0] != '\0')
4242  {
4243  ContinueString(pStr);
4244  }
4245 }
4246 
4247 void VmaJsonWriter::ContinueString(const char* pStr)
4248 {
4249  VMA_ASSERT(m_InsideString);
4250 
4251  const size_t strLen = strlen(pStr);
4252  for(size_t i = 0; i < strLen; ++i)
4253  {
4254  char ch = pStr[i];
4255  if(ch == '\\') // Backslash must be escaped in JSON; apostrophe need not be.
4256  {
4257  m_SB.Add("\\\\");
4258  }
4259  else if(ch == '"')
4260  {
4261  m_SB.Add("\\\"");
4262  }
4263  else if(ch >= 32)
4264  {
4265  m_SB.Add(ch);
4266  }
4267  else switch(ch)
4268  {
4269  case '\b':
4270  m_SB.Add("\\b");
4271  break;
4272  case '\f':
4273  m_SB.Add("\\f");
4274  break;
4275  case '\n':
4276  m_SB.Add("\\n");
4277  break;
4278  case '\r':
4279  m_SB.Add("\\r");
4280  break;
4281  case '\t':
4282  m_SB.Add("\\t");
4283  break;
4284  default:
4285  VMA_ASSERT(0 && "Character not currently supported.");
4286  break;
4287  }
4288  }
4289 }
4290 
4291 void VmaJsonWriter::ContinueString(uint32_t n)
4292 {
4293  VMA_ASSERT(m_InsideString);
4294  m_SB.AddNumber(n);
4295 }
4296 
4297 void VmaJsonWriter::ContinueString(uint64_t n)
4298 {
4299  VMA_ASSERT(m_InsideString);
4300  m_SB.AddNumber(n);
4301 }
4302 
4303 void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
4304 {
4305  VMA_ASSERT(m_InsideString);
4306  m_SB.AddPointer(ptr);
4307 }
4308 
4309 void VmaJsonWriter::EndString(const char* pStr)
4310 {
4311  VMA_ASSERT(m_InsideString);
4312  if(pStr != VMA_NULL && pStr[0] != '\0')
4313  {
4314  ContinueString(pStr);
4315  }
4316  m_SB.Add('"');
4317  m_InsideString = false;
4318 }
4319 
4320 void VmaJsonWriter::WriteNumber(uint32_t n)
4321 {
4322  VMA_ASSERT(!m_InsideString);
4323  BeginValue(false);
4324  m_SB.AddNumber(n);
4325 }
4326 
4327 void VmaJsonWriter::WriteNumber(uint64_t n)
4328 {
4329  VMA_ASSERT(!m_InsideString);
4330  BeginValue(false);
4331  m_SB.AddNumber(n);
4332 }
4333 
4334 void VmaJsonWriter::WriteBool(bool b)
4335 {
4336  VMA_ASSERT(!m_InsideString);
4337  BeginValue(false);
4338  m_SB.Add(b ? "true" : "false");
4339 }
4340 
4341 void VmaJsonWriter::WriteNull()
4342 {
4343  VMA_ASSERT(!m_InsideString);
4344  BeginValue(false);
4345  m_SB.Add("null");
4346 }
4347 
4348 void VmaJsonWriter::BeginValue(bool isString)
4349 {
4350  if(!m_Stack.empty())
4351  {
4352  StackItem& currItem = m_Stack.back();
4353  if(currItem.type == COLLECTION_TYPE_OBJECT &&
4354  currItem.valueCount % 2 == 0)
4355  {
4356  VMA_ASSERT(isString);
4357  }
4358 
4359  if(currItem.type == COLLECTION_TYPE_OBJECT &&
4360  currItem.valueCount % 2 != 0)
4361  {
4362  m_SB.Add(": ");
4363  }
4364  else if(currItem.valueCount > 0)
4365  {
4366  m_SB.Add(", ");
4367  WriteIndent();
4368  }
4369  else
4370  {
4371  WriteIndent();
4372  }
4373  ++currItem.valueCount;
4374  }
4375 }
4376 
4377 void VmaJsonWriter::WriteIndent(bool oneLess)
4378 {
4379  if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4380  {
4381  m_SB.AddNewLine();
4382 
4383  size_t count = m_Stack.size();
4384  if(count > 0 && oneLess)
4385  {
4386  --count;
4387  }
4388  for(size_t i = 0; i < count; ++i)
4389  {
4390  m_SB.Add(INDENT);
4391  }
4392  }
4393 }
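
// Illustrative sketch, not part of the library: inside an object, string keys
// and values must alternate - exactly what the valueCount % 2 checks in
// BeginValue() enforce. This emits {"Name": "block", "Size": 1024} into sb.
static inline void VmaExampleWriteJson(
 const VkAllocationCallbacks* pAllocationCallbacks,
 VmaStringBuilder& sb)
{
 VmaJsonWriter json(pAllocationCallbacks, sb);
 json.BeginObject(true); // singleLine == true: no newlines or indentation.
 json.WriteString("Name");
 json.WriteString("block");
 json.WriteString("Size");
 json.WriteNumber(static_cast<uint32_t>(1024));
 json.EndObject();
}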
4394 
4395 #endif // #if VMA_STATS_STRING_ENABLED
4396 
4398 
4399 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
4400 {
4401  if(IsUserDataString())
4402  {
4403  VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4404 
4405  FreeUserDataString(hAllocator);
4406 
4407  if(pUserData != VMA_NULL)
4408  {
4409  const char* const newStrSrc = (char*)pUserData;
4410  const size_t newStrLen = strlen(newStrSrc);
4411  char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
4412  memcpy(newStrDst, newStrSrc, newStrLen + 1);
4413  m_pUserData = newStrDst;
4414  }
4415  }
4416  else
4417  {
4418  m_pUserData = pUserData;
4419  }
4420 }
4421 
4422 void VmaAllocation_T::ChangeBlockAllocation(
4423  VmaAllocator hAllocator,
4424  VmaDeviceMemoryBlock* block,
4425  VkDeviceSize offset)
4426 {
4427  VMA_ASSERT(block != VMA_NULL);
4428  VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4429 
4430  // Move mapping reference counter from old block to new block.
4431  if(block != m_BlockAllocation.m_Block)
4432  {
4433  uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
4434  if(IsPersistentMap())
4435  ++mapRefCount;
4436  m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4437  block->Map(hAllocator, mapRefCount, VMA_NULL);
4438  }
4439 
4440  m_BlockAllocation.m_Block = block;
4441  m_BlockAllocation.m_Offset = offset;
4442 }
4443 
4444 VkDeviceSize VmaAllocation_T::GetOffset() const
4445 {
4446  switch(m_Type)
4447  {
4448  case ALLOCATION_TYPE_BLOCK:
4449  return m_BlockAllocation.m_Offset;
4450  case ALLOCATION_TYPE_DEDICATED:
4451  return 0;
4452  default:
4453  VMA_ASSERT(0);
4454  return 0;
4455  }
4456 }
4457 
4458 VkDeviceMemory VmaAllocation_T::GetMemory() const
4459 {
4460  switch(m_Type)
4461  {
4462  case ALLOCATION_TYPE_BLOCK:
4463  return m_BlockAllocation.m_Block->m_hMemory;
4464  case ALLOCATION_TYPE_DEDICATED:
4465  return m_DedicatedAllocation.m_hMemory;
4466  default:
4467  VMA_ASSERT(0);
4468  return VK_NULL_HANDLE;
4469  }
4470 }
4471 
4472 uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
4473 {
4474  switch(m_Type)
4475  {
4476  case ALLOCATION_TYPE_BLOCK:
4477  return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4478  case ALLOCATION_TYPE_DEDICATED:
4479  return m_DedicatedAllocation.m_MemoryTypeIndex;
4480  default:
4481  VMA_ASSERT(0);
4482  return UINT32_MAX;
4483  }
4484 }
4485 
4486 void* VmaAllocation_T::GetMappedData() const
4487 {
4488  switch(m_Type)
4489  {
4490  case ALLOCATION_TYPE_BLOCK:
4491  if(m_MapCount != 0)
4492  {
4493  void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4494  VMA_ASSERT(pBlockData != VMA_NULL);
4495  return (char*)pBlockData + m_BlockAllocation.m_Offset;
4496  }
4497  else
4498  {
4499  return VMA_NULL;
4500  }
4501  break;
4502  case ALLOCATION_TYPE_DEDICATED:
4503  VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4504  return m_DedicatedAllocation.m_pMappedData;
4505  default:
4506  VMA_ASSERT(0);
4507  return VMA_NULL;
4508  }
4509 }
4510 
4511 bool VmaAllocation_T::CanBecomeLost() const
4512 {
4513  switch(m_Type)
4514  {
4515  case ALLOCATION_TYPE_BLOCK:
4516  return m_BlockAllocation.m_CanBecomeLost;
4517  case ALLOCATION_TYPE_DEDICATED:
4518  return false;
4519  default:
4520  VMA_ASSERT(0);
4521  return false;
4522  }
4523 }
4524 
4525 VmaPool VmaAllocation_T::GetPool() const
4526 {
4527  VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4528  return m_BlockAllocation.m_hPool;
4529 }
4530 
4531 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4532 {
4533  VMA_ASSERT(CanBecomeLost());
4534 
4535  /*
4536  Warning: This is a carefully designed algorithm.
4537  Do not modify unless you really know what you're doing :)
4538  */
4539  uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4540  for(;;)
4541  {
4542  if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4543  {
4544  VMA_ASSERT(0);
4545  return false;
4546  }
4547  else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4548  {
4549  return false;
4550  }
4551  else // Last use time earlier than current time.
4552  {
4553  if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4554  {
4555  // Setting hAllocation.LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
4556  // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
4557  return true;
4558  }
4559  }
4560  }
4561 }
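
// Editorial example of the arithmetic above: with currentFrameIndex == 10 and
// frameInUseCount == 2, an allocation last used in frame 7 satisfies
// 7 + 2 < 10 and is made lost, while one last used in frame 8 satisfies
// 8 + 2 >= 10 and is still protected. The compare_exchange loop retries only
// when another thread bumps LastUseFrameIndex between the read and the swap.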
4562 
4563 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4564 {
4565  VMA_ASSERT(IsUserDataString());
4566  if(m_pUserData != VMA_NULL)
4567  {
4568  char* const oldStr = (char*)m_pUserData;
4569  const size_t oldStrLen = strlen(oldStr);
4570  vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4571  m_pUserData = VMA_NULL;
4572  }
4573 }
4574 
4575 void VmaAllocation_T::BlockAllocMap()
4576 {
4577  VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4578 
4579  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4580  {
4581  ++m_MapCount;
4582  }
4583  else
4584  {
4585  VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
4586  }
4587 }
4588 
4589 void VmaAllocation_T::BlockAllocUnmap()
4590 {
4591  VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4592 
4593  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4594  {
4595  --m_MapCount;
4596  }
4597  else
4598  {
4599  VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
4600  }
4601 }
4602 
4603 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
4604 {
4605  VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4606 
4607  if(m_MapCount != 0)
4608  {
4609  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4610  {
4611  VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4612  *ppData = m_DedicatedAllocation.m_pMappedData;
4613  ++m_MapCount;
4614  return VK_SUCCESS;
4615  }
4616  else
4617  {
4618  VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
4619  return VK_ERROR_MEMORY_MAP_FAILED;
4620  }
4621  }
4622  else
4623  {
4624  VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4625  hAllocator->m_hDevice,
4626  m_DedicatedAllocation.m_hMemory,
4627  0, // offset
4628  VK_WHOLE_SIZE,
4629  0, // flags
4630  ppData);
4631  if(result == VK_SUCCESS)
4632  {
4633  m_DedicatedAllocation.m_pMappedData = *ppData;
4634  m_MapCount = 1;
4635  }
4636  return result;
4637  }
4638 }
4639 
4640 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4641 {
4642  VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4643 
4644  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4645  {
4646  --m_MapCount;
4647  if(m_MapCount == 0)
4648  {
4649  m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4650  (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4651  hAllocator->m_hDevice,
4652  m_DedicatedAllocation.m_hMemory);
4653  }
4654  }
4655  else
4656  {
4657  VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
4658  }
4659 }
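
// Illustrative sketch, not part of the library: m_MapCount packs two things
// into one byte - bit 0x80 (MAP_COUNT_FLAG_PERSISTENT_MAP) marks an allocation
// created with VMA_ALLOCATION_CREATE_MAPPED_BIT, and the low bits 0x7F count
// outstanding vmaMapMemory() calls, which is why the Map functions above
// saturate at 0x7F.
static inline uint32_t VmaExampleMapRefCount(uint8_t mapCount)
{
 return static_cast<uint32_t>(mapCount & 0x7F); // Strip the persistent flag.
}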
4660 
4661 #if VMA_STATS_STRING_ENABLED
4662 
4663 // These names correspond to the values of enum VmaSuballocationType.
4664 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4665  "FREE",
4666  "UNKNOWN",
4667  "BUFFER",
4668  "IMAGE_UNKNOWN",
4669  "IMAGE_LINEAR",
4670  "IMAGE_OPTIMAL",
4671 };
4672 
4673 static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
4674 {
4675  json.BeginObject();
4676 
4677  json.WriteString("Blocks");
4678  json.WriteNumber(stat.blockCount);
4679 
4680  json.WriteString("Allocations");
4681  json.WriteNumber(stat.allocationCount);
4682 
4683  json.WriteString("UnusedRanges");
4684  json.WriteNumber(stat.unusedRangeCount);
4685 
4686  json.WriteString("UsedBytes");
4687  json.WriteNumber(stat.usedBytes);
4688 
4689  json.WriteString("UnusedBytes");
4690  json.WriteNumber(stat.unusedBytes);
4691 
4692  if(stat.allocationCount > 1)
4693  {
4694  json.WriteString("AllocationSize");
4695  json.BeginObject(true);
4696  json.WriteString("Min");
4697  json.WriteNumber(stat.allocationSizeMin);
4698  json.WriteString("Avg");
4699  json.WriteNumber(stat.allocationSizeAvg);
4700  json.WriteString("Max");
4701  json.WriteNumber(stat.allocationSizeMax);
4702  json.EndObject();
4703  }
4704 
4705  if(stat.unusedRangeCount > 1)
4706  {
4707  json.WriteString("UnusedRangeSize");
4708  json.BeginObject(true);
4709  json.WriteString("Min");
4710  json.WriteNumber(stat.unusedRangeSizeMin);
4711  json.WriteString("Avg");
4712  json.WriteNumber(stat.unusedRangeSizeAvg);
4713  json.WriteString("Max");
4714  json.WriteNumber(stat.unusedRangeSizeMax);
4715  json.EndObject();
4716  }
4717 
4718  json.EndObject();
4719 }
4720 
4721 #endif // #if VMA_STATS_STRING_ENABLED
4722 
4723 struct VmaSuballocationItemSizeLess
4724 {
4725  bool operator()(
4726  const VmaSuballocationList::iterator lhs,
4727  const VmaSuballocationList::iterator rhs) const
4728  {
4729  return lhs->size < rhs->size;
4730  }
4731  bool operator()(
4732  const VmaSuballocationList::iterator lhs,
4733  VkDeviceSize rhsSize) const
4734  {
4735  return lhs->size < rhsSize;
4736  }
4737 };
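
// Illustrative sketch, not part of the library: the second operator() overload
// lets this comparator order a stored iterator against a plain VkDeviceSize
// key, which is what the lower-bound style search over
// m_FreeSuballocationsBySize in CreateAllocationRequest() below relies on.
static inline VmaSuballocationList::iterator* VmaExampleFindFirstFit(
 VmaSuballocationList::iterator* pBegin,
 VmaSuballocationList::iterator* pEnd,
 VkDeviceSize allocSize)
{
 // Returns the first registered free suballocation whose size is not less
 // than allocSize, or pEnd if none is large enough.
 return VmaBinaryFindFirstNotLess(pBegin, pEnd, allocSize, VmaSuballocationItemSizeLess());
}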
4738 
4740 // class VmaBlockMetadata
4741 
4742 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4743  m_Size(0),
4744  m_FreeCount(0),
4745  m_SumFreeSize(0),
4746  m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4747  m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4748 {
4749 }
4750 
4751 VmaBlockMetadata::~VmaBlockMetadata()
4752 {
4753 }
4754 
4755 void VmaBlockMetadata::Init(VkDeviceSize size)
4756 {
4757  m_Size = size;
4758  m_FreeCount = 1;
4759  m_SumFreeSize = size;
4760 
4761  VmaSuballocation suballoc = {};
4762  suballoc.offset = 0;
4763  suballoc.size = size;
4764  suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4765  suballoc.hAllocation = VK_NULL_HANDLE;
4766 
4767  m_Suballocations.push_back(suballoc);
4768  VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4769  --suballocItem;
4770  m_FreeSuballocationsBySize.push_back(suballocItem);
4771 }
4772 
4773 bool VmaBlockMetadata::Validate() const
4774 {
4775  if(m_Suballocations.empty())
4776  {
4777  return false;
4778  }
4779 
4780  // Expected offset of the next suballocation, as calculated from previous ones.
4781  VkDeviceSize calculatedOffset = 0;
4782  // Expected number of free suballocations as calculated from traversing their list.
4783  uint32_t calculatedFreeCount = 0;
4784  // Expected sum size of free suballocations as calculated from traversing their list.
4785  VkDeviceSize calculatedSumFreeSize = 0;
4786  // Expected number of free suballocations that should be registered in
4787  // m_FreeSuballocationsBySize calculated from traversing their list.
4788  size_t freeSuballocationsToRegister = 0;
4789  // True if the previously visited suballocation was free.
4790  bool prevFree = false;
4791 
4792  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4793  suballocItem != m_Suballocations.cend();
4794  ++suballocItem)
4795  {
4796  const VmaSuballocation& subAlloc = *suballocItem;
4797 
4798  // Actual offset of this suballocation doesn't match the expected one.
4799  if(subAlloc.offset != calculatedOffset)
4800  {
4801  return false;
4802  }
4803 
4804  const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4805  // Two adjacent free suballocations are invalid. They should be merged.
4806  if(prevFree && currFree)
4807  {
4808  return false;
4809  }
4810 
4811  if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4812  {
4813  return false;
4814  }
4815 
4816  if(currFree)
4817  {
4818  calculatedSumFreeSize += subAlloc.size;
4819  ++calculatedFreeCount;
4820  if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4821  {
4822  ++freeSuballocationsToRegister;
4823  }
4824  }
4825  else
4826  {
4827  if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
4828  {
4829  return false;
4830  }
4831  if(subAlloc.hAllocation->GetSize() != subAlloc.size)
4832  {
4833  return false;
4834  }
4835  }
4836 
4837  calculatedOffset += subAlloc.size;
4838  prevFree = currFree;
4839  }
4840 
4841  // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
4842  // match the expected one.
4843  if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4844  {
4845  return false;
4846  }
4847 
4848  VkDeviceSize lastSize = 0;
4849  for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4850  {
4851  VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4852 
4853  // Only free suballocations can be registered in m_FreeSuballocationsBySize.
4854  if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4855  {
4856  return false;
4857  }
4858  // They must be sorted by size ascending.
4859  if(suballocItem->size < lastSize)
4860  {
4861  return false;
4862  }
4863 
4864  lastSize = suballocItem->size;
4865  }
4866 
4867  // Check if totals match the calculated values.
4868  if(!ValidateFreeSuballocationList() ||
4869  (calculatedOffset != m_Size) ||
4870  (calculatedSumFreeSize != m_SumFreeSize) ||
4871  (calculatedFreeCount != m_FreeCount))
4872  {
4873  return false;
4874  }
4875 
4876  return true;
4877 }
4878 
4879 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax() const
4880 {
4881  if(!m_FreeSuballocationsBySize.empty())
4882  {
4883  return m_FreeSuballocationsBySize.back()->size;
4884  }
4885  else
4886  {
4887  return 0;
4888  }
4889 }
4890 
4891 bool VmaBlockMetadata::IsEmpty() const
4892 {
4893  return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
4894 }
4895 
4896 void VmaBlockMetadata::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
4897 {
4898  outInfo.blockCount = 1;
4899 
4900  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4901  outInfo.allocationCount = rangeCount - m_FreeCount;
4902  outInfo.unusedRangeCount = m_FreeCount;
4903 
4904  outInfo.unusedBytes = m_SumFreeSize;
4905  outInfo.usedBytes = m_Size - outInfo.unusedBytes;
4906 
4907  outInfo.allocationSizeMin = UINT64_MAX;
4908  outInfo.allocationSizeMax = 0;
4909  outInfo.unusedRangeSizeMin = UINT64_MAX;
4910  outInfo.unusedRangeSizeMax = 0;
4911 
4912  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4913  suballocItem != m_Suballocations.cend();
4914  ++suballocItem)
4915  {
4916  const VmaSuballocation& suballoc = *suballocItem;
4917  if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
4918  {
4919  outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
4920  outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
4921  }
4922  else
4923  {
4924  outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
4925  outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
4926  }
4927  }
4928 }
4929 
4930 void VmaBlockMetadata::AddPoolStats(VmaPoolStats& inoutStats) const
4931 {
4932  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4933 
4934  inoutStats.size += m_Size;
4935  inoutStats.unusedSize += m_SumFreeSize;
4936  inoutStats.allocationCount += rangeCount - m_FreeCount;
4937  inoutStats.unusedRangeCount += m_FreeCount;
4938  inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
4939 }
4940 
4941 #if VMA_STATS_STRING_ENABLED
4942 
4943 void VmaBlockMetadata::PrintDetailedMap(class VmaJsonWriter& json) const
4944 {
4945  json.BeginObject();
4946 
4947  json.WriteString("TotalBytes");
4948  json.WriteNumber(m_Size);
4949 
4950  json.WriteString("UnusedBytes");
4951  json.WriteNumber(m_SumFreeSize);
4952 
4953  json.WriteString("Allocations");
4954  json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4955 
4956  json.WriteString("UnusedRanges");
4957  json.WriteNumber(m_FreeCount);
4958 
4959  json.WriteString("Suballocations");
4960  json.BeginArray();
4961  size_t i = 0;
4962  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4963  suballocItem != m_Suballocations.cend();
4964  ++suballocItem, ++i)
4965  {
4966  json.BeginObject(true);
4967 
4968  json.WriteString("Type");
4969  json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4970 
4971  json.WriteString("Size");
4972  json.WriteNumber(suballocItem->size);
4973 
4974  json.WriteString("Offset");
4975  json.WriteNumber(suballocItem->offset);
4976 
4977  if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4978  {
4979  const void* pUserData = suballocItem->hAllocation->GetUserData();
4980  if(pUserData != VMA_NULL)
4981  {
4982  json.WriteString("UserData");
4983  if(suballocItem->hAllocation->IsUserDataString())
4984  {
4985  json.WriteString((const char*)pUserData);
4986  }
4987  else
4988  {
4989  json.BeginString();
4990  json.ContinueString_Pointer(pUserData);
4991  json.EndString();
4992  }
4993  }
4994  }
4995 
4996  json.EndObject();
4997  }
4998  json.EndArray();
4999 
5000  json.EndObject();
5001 }
5002 
5003 #endif // #if VMA_STATS_STRING_ENABLED
5004 
5005 /*
5006 How many suitable free suballocations to analyze before choosing the best one.
5007 - Set to 1 to use First-Fit algorithm - the first suitable free suballocation
5008  will be chosen.
5009 - Set to UINT32_MAX to use Best-Fit/Worst-Fit algorithm - all suitable free
5010  suballocations will be analyzed and the best one will be chosen.
5011 - Any other value is also acceptable.
5012 */
5013 //static const uint32_t MAX_SUITABLE_SUBALLOCATIONS_TO_CHECK = 8;
5014 
5015 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5016 {
5017  VMA_ASSERT(IsEmpty());
5018  pAllocationRequest->offset = 0;
5019  pAllocationRequest->sumFreeSize = m_SumFreeSize;
5020  pAllocationRequest->sumItemSize = 0;
5021  pAllocationRequest->item = m_Suballocations.begin();
5022  pAllocationRequest->itemsToMakeLostCount = 0;
5023 }
5024 
5025 bool VmaBlockMetadata::CreateAllocationRequest(
5026  uint32_t currentFrameIndex,
5027  uint32_t frameInUseCount,
5028  VkDeviceSize bufferImageGranularity,
5029  VkDeviceSize allocSize,
5030  VkDeviceSize allocAlignment,
5031  VmaSuballocationType allocType,
5032  bool canMakeOtherLost,
5033  VmaAllocationRequest* pAllocationRequest)
5034 {
5035  VMA_ASSERT(allocSize > 0);
5036  VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5037  VMA_ASSERT(pAllocationRequest != VMA_NULL);
5038  VMA_HEAVY_ASSERT(Validate());
5039 
5040  // There is not enough total free space in this block to fulfill the request: Early return.
5041  if(canMakeOtherLost == false && m_SumFreeSize < allocSize)
5042  {
5043  return false;
5044  }
5045 
5046  // Fast path: search m_FreeSuballocationsBySize, which is sorted by size, ascending.
5047  const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5048  if(freeSuballocCount > 0)
5049  {
5050  if(VMA_BEST_FIT)
5051  {
5052  // Find first free suballocation with size not less than allocSize.
5053  VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
5054  m_FreeSuballocationsBySize.data(),
5055  m_FreeSuballocationsBySize.data() + freeSuballocCount,
5056  allocSize,
5057  VmaSuballocationItemSizeLess());
5058  size_t index = it - m_FreeSuballocationsBySize.data();
5059  for(; index < freeSuballocCount; ++index)
5060  {
5061  if(CheckAllocation(
5062  currentFrameIndex,
5063  frameInUseCount,
5064  bufferImageGranularity,
5065  allocSize,
5066  allocAlignment,
5067  allocType,
5068  m_FreeSuballocationsBySize[index],
5069  false, // canMakeOtherLost
5070  &pAllocationRequest->offset,
5071  &pAllocationRequest->itemsToMakeLostCount,
5072  &pAllocationRequest->sumFreeSize,
5073  &pAllocationRequest->sumItemSize))
5074  {
5075  pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5076  return true;
5077  }
5078  }
5079  }
5080  else
5081  {
5082  // Search starting from the biggest suballocations (worst-fit).
5083  for(size_t index = freeSuballocCount; index--; )
5084  {
5085  if(CheckAllocation(
5086  currentFrameIndex,
5087  frameInUseCount,
5088  bufferImageGranularity,
5089  allocSize,
5090  allocAlignment,
5091  allocType,
5092  m_FreeSuballocationsBySize[index],
5093  false, // canMakeOtherLost
5094  &pAllocationRequest->offset,
5095  &pAllocationRequest->itemsToMakeLostCount,
5096  &pAllocationRequest->sumFreeSize,
5097  &pAllocationRequest->sumItemSize))
5098  {
5099  pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5100  return true;
5101  }
5102  }
5103  }
5104  }
5105 
5106  if(canMakeOtherLost)
5107  {
5108  // Brute-force algorithm. TODO: Come up with something better.
5109 
5110  pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5111  pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5112 
5113  VmaAllocationRequest tmpAllocRequest = {};
5114  for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5115  suballocIt != m_Suballocations.end();
5116  ++suballocIt)
5117  {
5118  if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5119  suballocIt->hAllocation->CanBecomeLost())
5120  {
5121  if(CheckAllocation(
5122  currentFrameIndex,
5123  frameInUseCount,
5124  bufferImageGranularity,
5125  allocSize,
5126  allocAlignment,
5127  allocType,
5128  suballocIt,
5129  canMakeOtherLost,
5130  &tmpAllocRequest.offset,
5131  &tmpAllocRequest.itemsToMakeLostCount,
5132  &tmpAllocRequest.sumFreeSize,
5133  &tmpAllocRequest.sumItemSize))
5134  {
5135  tmpAllocRequest.item = suballocIt;
5136 
5137  if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5138  {
5139  *pAllocationRequest = tmpAllocRequest;
5140  }
5141  }
5142  }
5143  }
5144 
5145  if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
5146  {
5147  return true;
5148  }
5149  }
5150 
5151  return false;
5152 }
5153 
5154 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5155  uint32_t currentFrameIndex,
5156  uint32_t frameInUseCount,
5157  VmaAllocationRequest* pAllocationRequest)
5158 {
5159  while(pAllocationRequest->itemsToMakeLostCount > 0)
5160  {
5161  if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5162  {
5163  ++pAllocationRequest->item;
5164  }
5165  VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5166  VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5167  VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5168  if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5169  {
5170  pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5171  --pAllocationRequest->itemsToMakeLostCount;
5172  }
5173  else
5174  {
5175  return false;
5176  }
5177  }
5178 
5179  VMA_HEAVY_ASSERT(Validate());
5180  VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5181  VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
5182 
5183  return true;
5184 }
5185 
5186 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5187 {
5188  uint32_t lostAllocationCount = 0;
5189  for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5190  it != m_Suballocations.end();
5191  ++it)
5192  {
5193  if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5194  it->hAllocation->CanBecomeLost() &&
5195  it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5196  {
5197  it = FreeSuballocation(it);
5198  ++lostAllocationCount;
5199  }
5200  }
5201  return lostAllocationCount;
5202 }
5203 
5204 void VmaBlockMetadata::Alloc(
5205  const VmaAllocationRequest& request,
5206  VmaSuballocationType type,
5207  VkDeviceSize allocSize,
5208  VmaAllocation hAllocation)
5209 {
5210  VMA_ASSERT(request.item != m_Suballocations.end());
5211  VmaSuballocation& suballoc = *request.item;
5212  // Given suballocation is a free block.
5213  VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5214  // Given offset is inside this suballocation.
5215  VMA_ASSERT(request.offset >= suballoc.offset);
5216  const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5217  VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5218  const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
5219 
5220  // Unregister this free suballocation from m_FreeSuballocationsBySize and update
5221  // it to become used.
5222  UnregisterFreeSuballocation(request.item);
5223 
5224  suballoc.offset = request.offset;
5225  suballoc.size = allocSize;
5226  suballoc.type = type;
5227  suballoc.hAllocation = hAllocation;
5228 
5229  // If there are any free bytes remaining at the end, insert new free suballocation after current one.
5230  if(paddingEnd)
5231  {
5232  VmaSuballocation paddingSuballoc = {};
5233  paddingSuballoc.offset = request.offset + allocSize;
5234  paddingSuballoc.size = paddingEnd;
5235  paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5236  VmaSuballocationList::iterator next = request.item;
5237  ++next;
5238  const VmaSuballocationList::iterator paddingEndItem =
5239  m_Suballocations.insert(next, paddingSuballoc);
5240  RegisterFreeSuballocation(paddingEndItem);
5241  }
5242 
5243  // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
5244  if(paddingBegin)
5245  {
5246  VmaSuballocation paddingSuballoc = {};
5247  paddingSuballoc.offset = request.offset - paddingBegin;
5248  paddingSuballoc.size = paddingBegin;
5249  paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5250  const VmaSuballocationList::iterator paddingBeginItem =
5251  m_Suballocations.insert(request.item, paddingSuballoc);
5252  RegisterFreeSuballocation(paddingBeginItem);
5253  }
5254 
5255  // Update totals.
5256  m_FreeCount = m_FreeCount - 1;
5257  if(paddingBegin > 0)
5258  {
5259  ++m_FreeCount;
5260  }
5261  if(paddingEnd > 0)
5262  {
5263  ++m_FreeCount;
5264  }
5265  m_SumFreeSize -= allocSize;
5266 }
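
// Editorial example of the split above: allocating allocSize == 256 at
// request.offset == 320 out of a free suballocation {offset: 256, size: 512}
// gives paddingBegin == 64 and paddingEnd == 192, so the range [256, 768)
// ends up as free [256, 320), used [320, 576), free [576, 768) - m_FreeCount
// loses the original free range and gains the two padding ranges (net +1).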
5267 
5268 void VmaBlockMetadata::Free(const VmaAllocation allocation)
5269 {
5270  for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5271  suballocItem != m_Suballocations.end();
5272  ++suballocItem)
5273  {
5274  VmaSuballocation& suballoc = *suballocItem;
5275  if(suballoc.hAllocation == allocation)
5276  {
5277  FreeSuballocation(suballocItem);
5278  VMA_HEAVY_ASSERT(Validate());
5279  return;
5280  }
5281  }
5282  VMA_ASSERT(0 && "Not found!");
5283 }
5284 
5285 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5286 {
5287  for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5288  suballocItem != m_Suballocations.end();
5289  ++suballocItem)
5290  {
5291  VmaSuballocation& suballoc = *suballocItem;
5292  if(suballoc.offset == offset)
5293  {
5294  FreeSuballocation(suballocItem);
5295  return;
5296  }
5297  }
5298  VMA_ASSERT(0 && "Not found!");
5299 }
5300 
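// m_FreeSuballocationsBySize maintains the invariants checked below: every entry
// points to a free suballocation, no entry is smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER, and entries are sorted by ascending
// size, so best-fit searches can use binary search.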
5301 bool VmaBlockMetadata::ValidateFreeSuballocationList() const
5302 {
5303  VkDeviceSize lastSize = 0;
5304  for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5305  {
5306  const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5307 
5308  if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5309  {
5310  VMA_ASSERT(0);
5311  return false;
5312  }
5313  if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5314  {
5315  VMA_ASSERT(0);
5316  return false;
5317  }
5318  if(it->size < lastSize)
5319  {
5320  VMA_ASSERT(0);
5321  return false;
5322  }
5323 
5324  lastSize = it->size;
5325  }
5326  return true;
5327 }
5328 
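// Checks whether an allocation of allocSize/allocAlignment can be placed starting
// at suballocItem. Runs in one of two modes:
// - canMakeOtherLost == false: suballocItem must be a single free range large
//   enough for the request, including debug margins and bufferImageGranularity
//   padding.
// - canMakeOtherLost == true: the candidate region may also span used
//   suballocations, provided each of them can be made lost; *itemsToMakeLostCount,
//   *pSumFreeSize and *pSumItemSize then describe the cost of this candidate so the
//   caller can pick the cheapest one.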
5329 bool VmaBlockMetadata::CheckAllocation(
5330  uint32_t currentFrameIndex,
5331  uint32_t frameInUseCount,
5332  VkDeviceSize bufferImageGranularity,
5333  VkDeviceSize allocSize,
5334  VkDeviceSize allocAlignment,
5335  VmaSuballocationType allocType,
5336  VmaSuballocationList::const_iterator suballocItem,
5337  bool canMakeOtherLost,
5338  VkDeviceSize* pOffset,
5339  size_t* itemsToMakeLostCount,
5340  VkDeviceSize* pSumFreeSize,
5341  VkDeviceSize* pSumItemSize) const
5342 {
5343  VMA_ASSERT(allocSize > 0);
5344  VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5345  VMA_ASSERT(suballocItem != m_Suballocations.cend());
5346  VMA_ASSERT(pOffset != VMA_NULL);
5347 
5348  *itemsToMakeLostCount = 0;
5349  *pSumFreeSize = 0;
5350  *pSumItemSize = 0;
5351 
5352  if(canMakeOtherLost)
5353  {
5354  if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5355  {
5356  *pSumFreeSize = suballocItem->size;
5357  }
5358  else
5359  {
5360  if(suballocItem->hAllocation->CanBecomeLost() &&
5361  suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5362  {
5363  ++*itemsToMakeLostCount;
5364  *pSumItemSize = suballocItem->size;
5365  }
5366  else
5367  {
5368  return false;
5369  }
5370  }
5371 
5372  // Remaining size is too small for this request: Early return.
5373  if(m_Size - suballocItem->offset < allocSize)
5374  {
5375  return false;
5376  }
5377 
5378  // Start from offset equal to beginning of this suballocation.
5379  *pOffset = suballocItem->offset;
5380 
5381  // Apply VMA_DEBUG_MARGIN at the beginning.
5382  if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5383  {
5384  *pOffset += VMA_DEBUG_MARGIN;
5385  }
5386 
5387  // Apply alignment.
5388  const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5389  *pOffset = VmaAlignUp(*pOffset, alignment);
5390 
5391  // Check previous suballocations for BufferImageGranularity conflicts.
5392  // Make bigger alignment if necessary.
5393  if(bufferImageGranularity > 1)
5394  {
5395  bool bufferImageGranularityConflict = false;
5396  VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5397  while(prevSuballocItem != m_Suballocations.cbegin())
5398  {
5399  --prevSuballocItem;
5400  const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5401  if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5402  {
5403  if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5404  {
5405  bufferImageGranularityConflict = true;
5406  break;
5407  }
5408  }
5409  else
5410  // Already on previous page.
5411  break;
5412  }
5413  if(bufferImageGranularityConflict)
5414  {
5415  *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5416  }
5417  }
5418 
5419  // Now that we have final *pOffset, check if we are past suballocItem.
5420  // If yes, return false - this function should be called for another suballocItem as starting point.
5421  if(*pOffset >= suballocItem->offset + suballocItem->size)
5422  {
5423  return false;
5424  }
5425 
5426  // Calculate padding at the beginning based on current offset.
5427  const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5428 
5429  // Calculate required margin at the end if this is not last suballocation.
5430  VmaSuballocationList::const_iterator next = suballocItem;
5431  ++next;
5432  const VkDeviceSize requiredEndMargin =
5433  (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5434 
5435  const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5436  // Another early return check.
5437  if(suballocItem->offset + totalSize > m_Size)
5438  {
5439  return false;
5440  }
5441 
5442  // Advance lastSuballocItem until desired size is reached.
5443  // Update itemsToMakeLostCount.
5444  VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5445  if(totalSize > suballocItem->size)
5446  {
5447  VkDeviceSize remainingSize = totalSize - suballocItem->size;
5448  while(remainingSize > 0)
5449  {
5450  ++lastSuballocItem;
5451  if(lastSuballocItem == m_Suballocations.cend())
5452  {
5453  return false;
5454  }
5455  if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5456  {
5457  *pSumFreeSize += lastSuballocItem->size;
5458  }
5459  else
5460  {
5461  VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5462  if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5463  lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5464  {
5465  ++*itemsToMakeLostCount;
5466  *pSumItemSize += lastSuballocItem->size;
5467  }
5468  else
5469  {
5470  return false;
5471  }
5472  }
5473  remainingSize = (lastSuballocItem->size < remainingSize) ?
5474  remainingSize - lastSuballocItem->size : 0;
5475  }
5476  }
5477 
5478  // Check next suballocations for BufferImageGranularity conflicts.
5479  // If conflict exists, we must mark more allocations lost or fail.
5480  if(bufferImageGranularity > 1)
5481  {
5482  VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5483  ++nextSuballocItem;
5484  while(nextSuballocItem != m_Suballocations.cend())
5485  {
5486  const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5487  if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5488  {
5489  if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5490  {
5491  VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5492  if(nextSuballoc.hAllocation->CanBecomeLost() &&
5493  nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5494  {
5495  ++*itemsToMakeLostCount;
5496  }
5497  else
5498  {
5499  return false;
5500  }
5501  }
5502  }
5503  else
5504  {
5505  // Already on next page.
5506  break;
5507  }
5508  ++nextSuballocItem;
5509  }
5510  }
5511  }
5512  else
5513  {
5514  const VmaSuballocation& suballoc = *suballocItem;
5515  VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5516 
5517  *pSumFreeSize = suballoc.size;
5518 
5519  // Size of this suballocation is too small for this request: Early return.
5520  if(suballoc.size < allocSize)
5521  {
5522  return false;
5523  }
5524 
5525  // Start from offset equal to beginning of this suballocation.
5526  *pOffset = suballoc.offset;
5527 
5528  // Apply VMA_DEBUG_MARGIN at the beginning.
5529  if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5530  {
5531  *pOffset += VMA_DEBUG_MARGIN;
5532  }
5533 
5534  // Apply alignment.
5535  const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5536  *pOffset = VmaAlignUp(*pOffset, alignment);
5537 
5538  // Check previous suballocations for BufferImageGranularity conflicts.
5539  // Make bigger alignment if necessary.
5540  if(bufferImageGranularity > 1)
5541  {
5542  bool bufferImageGranularityConflict = false;
5543  VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5544  while(prevSuballocItem != m_Suballocations.cbegin())
5545  {
5546  --prevSuballocItem;
5547  const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5548  if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5549  {
5550  if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5551  {
5552  bufferImageGranularityConflict = true;
5553  break;
5554  }
5555  }
5556  else
5557  // Already on previous page.
5558  break;
5559  }
5560  if(bufferImageGranularityConflict)
5561  {
5562  *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5563  }
5564  }
5565 
5566  // Calculate padding at the beginning based on current offset.
5567  const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5568 
5569  // Calculate required margin at the end if this is not last suballocation.
5570  VmaSuballocationList::const_iterator next = suballocItem;
5571  ++next;
5572  const VkDeviceSize requiredEndMargin =
5573  (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5574 
5575  // Fail if requested size plus margin before and after is bigger than size of this suballocation.
5576  if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
5577  {
5578  return false;
5579  }
5580 
5581  // Check next suballocations for BufferImageGranularity conflicts.
5582  // If conflict exists, allocation cannot be made here.
5583  if(bufferImageGranularity > 1)
5584  {
5585  VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5586  ++nextSuballocItem;
5587  while(nextSuballocItem != m_Suballocations.cend())
5588  {
5589  const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5590  if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5591  {
5592  if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5593  {
5594  return false;
5595  }
5596  }
5597  else
5598  {
5599  // Already on next page.
5600  break;
5601  }
5602  ++nextSuballocItem;
5603  }
5604  }
5605  }
5606 
5607  // All tests passed: Success. pOffset is already filled.
5608  return true;
5609 }
5610 
5611 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5612 {
5613  VMA_ASSERT(item != m_Suballocations.end());
5614  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5615 
5616  VmaSuballocationList::iterator nextItem = item;
5617  ++nextItem;
5618  VMA_ASSERT(nextItem != m_Suballocations.end());
5619  VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5620 
5621  item->size += nextItem->size;
5622  --m_FreeCount;
5623  m_Suballocations.erase(nextItem);
5624 }
5625 
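// Marks the given suballocation as free and coalesces it with adjacent free
// neighbors, so the list never contains two consecutive free ranges. Returns an
// iterator to the resulting (possibly merged) free suballocation, already
// re-registered in m_FreeSuballocationsBySize.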
5626 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5627 {
5628  // Change this suballocation to be marked as free.
5629  VmaSuballocation& suballoc = *suballocItem;
5630  suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5631  suballoc.hAllocation = VK_NULL_HANDLE;
5632 
5633  // Update totals.
5634  ++m_FreeCount;
5635  m_SumFreeSize += suballoc.size;
5636 
5637  // Merge with previous and/or next suballocation if it's also free.
5638  bool mergeWithNext = false;
5639  bool mergeWithPrev = false;
5640 
5641  VmaSuballocationList::iterator nextItem = suballocItem;
5642  ++nextItem;
5643  if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5644  {
5645  mergeWithNext = true;
5646  }
5647 
5648  VmaSuballocationList::iterator prevItem = suballocItem;
5649  if(suballocItem != m_Suballocations.begin())
5650  {
5651  --prevItem;
5652  if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5653  {
5654  mergeWithPrev = true;
5655  }
5656  }
5657 
5658  if(mergeWithNext)
5659  {
5660  UnregisterFreeSuballocation(nextItem);
5661  MergeFreeWithNext(suballocItem);
5662  }
5663 
5664  if(mergeWithPrev)
5665  {
5666  UnregisterFreeSuballocation(prevItem);
5667  MergeFreeWithNext(prevItem);
5668  RegisterFreeSuballocation(prevItem);
5669  return prevItem;
5670  }
5671  else
5672  {
5673  RegisterFreeSuballocation(suballocItem);
5674  return suballocItem;
5675  }
5676 }
5677 
5678 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5679 {
5680  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5681  VMA_ASSERT(item->size > 0);
5682 
5683  // You may want to enable this validation at the beginning or at the end of
5684  // this function, depending on what you want to check.
5685  VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5686 
5687  if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5688  {
5689  if(m_FreeSuballocationsBySize.empty())
5690  {
5691  m_FreeSuballocationsBySize.push_back(item);
5692  }
5693  else
5694  {
5695  VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
5696  }
5697  }
5698 
5699  //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5700 }
5701 
5702 
5703 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5704 {
5705  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5706  VMA_ASSERT(item->size > 0);
5707 
5708  // You may want to enable this validation at the beginning or at the end of
5709  // this function, depending on what you want to check.
5710  VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5711 
5712  if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5713  {
5714  VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
5715  m_FreeSuballocationsBySize.data(),
5716  m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5717  item,
5718  VmaSuballocationItemSizeLess());
5719  for(size_t index = it - m_FreeSuballocationsBySize.data();
5720  index < m_FreeSuballocationsBySize.size();
5721  ++index)
5722  {
5723  if(m_FreeSuballocationsBySize[index] == item)
5724  {
5725  VmaVectorRemove(m_FreeSuballocationsBySize, index);
5726  return;
5727  }
5728  VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
5729  }
5730  VMA_ASSERT(0 && "Not found.");
5731  }
5732 
5733  //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5734 }
5735 
5736 ////////////////////////////////////////////////////////////////////////////////
5737 // class VmaDeviceMemoryMapping
5738 
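// Keeps a CPU pointer to a mapped VkDeviceMemory together with a reference count,
// so nested map/unmap calls on allocations from the same block share a single
// vkMapMemory. Illustrative usage through the public API (a sketch, not part of
// this file):
//
//   void* pData = VMA_NULL;
//   vmaMapMemory(allocator, alloc, &pData); // m_MapCount 0 -> 1, calls vkMapMemory
//   vmaMapMemory(allocator, alloc, &pData); // m_MapCount 1 -> 2, reuses m_pMappedData
//   vmaUnmapMemory(allocator, alloc);       // m_MapCount 2 -> 1
//   vmaUnmapMemory(allocator, alloc);       // m_MapCount 1 -> 0, calls vkUnmapMemory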
5739 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5740  m_MapCount(0),
5741  m_pMappedData(VMA_NULL)
5742 {
5743 }
5744 
5745 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5746 {
5747  VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
5748 }
5749 
5750 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count, void **ppData)
5751 {
5752  if(count == 0)
5753  {
5754  return VK_SUCCESS;
5755  }
5756 
5757  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5758  if(m_MapCount != 0)
5759  {
5760  m_MapCount += count;
5761  VMA_ASSERT(m_pMappedData != VMA_NULL);
5762  if(ppData != VMA_NULL)
5763  {
5764  *ppData = m_pMappedData;
5765  }
5766  return VK_SUCCESS;
5767  }
5768  else
5769  {
5770  VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5771  hAllocator->m_hDevice,
5772  hMemory,
5773  0, // offset
5774  VK_WHOLE_SIZE,
5775  0, // flags
5776  &m_pMappedData);
5777  if(result == VK_SUCCESS)
5778  {
5779  if(ppData != VMA_NULL)
5780  {
5781  *ppData = m_pMappedData;
5782  }
5783  m_MapCount = count;
5784  }
5785  return result;
5786  }
5787 }
5788 
5789 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count)
5790 {
5791  if(count == 0)
5792  {
5793  return;
5794  }
5795 
5796  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5797  if(m_MapCount >= count)
5798  {
5799  m_MapCount -= count;
5800  if(m_MapCount == 0)
5801  {
5802  m_pMappedData = VMA_NULL;
5803  (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
5804  }
5805  }
5806  else
5807  {
5808  VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
5809  }
5810 }
5811 
5812 ////////////////////////////////////////////////////////////////////////////////
5813 // class VmaDeviceMemoryBlock
5814 
5815 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5816  m_MemoryTypeIndex(UINT32_MAX),
5817  m_hMemory(VK_NULL_HANDLE),
5818  m_Metadata(hAllocator)
5819 {
5820 }
5821 
5822 void VmaDeviceMemoryBlock::Init(
5823  uint32_t newMemoryTypeIndex,
5824  VkDeviceMemory newMemory,
5825  VkDeviceSize newSize)
5826 {
5827  VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5828 
5829  m_MemoryTypeIndex = newMemoryTypeIndex;
5830  m_hMemory = newMemory;
5831 
5832  m_Metadata.Init(newSize);
5833 }
5834 
5835 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5836 {
5837  // This is the most important assert in the entire library.
5838  // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
5839  VMA_ASSERT(m_Metadata.IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
5840 
5841  VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5842  allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5843  m_hMemory = VK_NULL_HANDLE;
5844 }
5845 
5846 bool VmaDeviceMemoryBlock::Validate() const
5847 {
5848  if((m_hMemory == VK_NULL_HANDLE) ||
5849  (m_Metadata.GetSize() == 0))
5850  {
5851  return false;
5852  }
5853 
5854  return m_Metadata.Validate();
5855 }
5856 
5857 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
5858 {
5859  return m_Mapping.Map(hAllocator, m_hMemory, count, ppData);
5860 }
5861 
5862 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
5863 {
5864  m_Mapping.Unmap(hAllocator, m_hMemory, count);
5865 }
5866 
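// Statistics helpers. The Min fields start at UINT64_MAX so that the VMA_MIN
// folding in VmaAddStatInfo works even before the first sample is added.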
5867 static void InitStatInfo(VmaStatInfo& outInfo)
5868 {
5869  memset(&outInfo, 0, sizeof(outInfo));
5870  outInfo.allocationSizeMin = UINT64_MAX;
5871  outInfo.unusedRangeSizeMin = UINT64_MAX;
5872 }
5873 
5874 // Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
5875 static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
5876 {
5877  inoutInfo.blockCount += srcInfo.blockCount;
5878  inoutInfo.allocationCount += srcInfo.allocationCount;
5879  inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
5880  inoutInfo.usedBytes += srcInfo.usedBytes;
5881  inoutInfo.unusedBytes += srcInfo.unusedBytes;
5882  inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
5883  inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
5884  inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
5885  inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
5886 }
5887 
5888 static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
5889 {
5890  inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
5891  VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
5892  inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
5893  VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
5894 }
5895 
5896 VmaPool_T::VmaPool_T(
5897  VmaAllocator hAllocator,
5898  const VmaPoolCreateInfo& createInfo) :
5899  m_BlockVector(
5900  hAllocator,
5901  createInfo.memoryTypeIndex,
5902  createInfo.blockSize,
5903  createInfo.minBlockCount,
5904  createInfo.maxBlockCount,
5905  (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
5906  createInfo.frameInUseCount,
5907  true) // isCustomPool
5908 {
5909 }
5910 
5911 VmaPool_T::~VmaPool_T()
5912 {
5913 }
5914 
5915 #if VMA_STATS_STRING_ENABLED
5916 
5917 #endif // #if VMA_STATS_STRING_ENABLED
5918 
5919 VmaBlockVector::VmaBlockVector(
5920  VmaAllocator hAllocator,
5921  uint32_t memoryTypeIndex,
5922  VkDeviceSize preferredBlockSize,
5923  size_t minBlockCount,
5924  size_t maxBlockCount,
5925  VkDeviceSize bufferImageGranularity,
5926  uint32_t frameInUseCount,
5927  bool isCustomPool) :
5928  m_hAllocator(hAllocator),
5929  m_MemoryTypeIndex(memoryTypeIndex),
5930  m_PreferredBlockSize(preferredBlockSize),
5931  m_MinBlockCount(minBlockCount),
5932  m_MaxBlockCount(maxBlockCount),
5933  m_BufferImageGranularity(bufferImageGranularity),
5934  m_FrameInUseCount(frameInUseCount),
5935  m_IsCustomPool(isCustomPool),
5936  m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5937  m_HasEmptyBlock(false),
5938  m_pDefragmentator(VMA_NULL)
5939 {
5940 }
5941 
5942 VmaBlockVector::~VmaBlockVector()
5943 {
5944  VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5945 
5946  for(size_t i = m_Blocks.size(); i--; )
5947  {
5948  m_Blocks[i]->Destroy(m_hAllocator);
5949  vma_delete(m_hAllocator, m_Blocks[i]);
5950  }
5951 }
5952 
5953 VkResult VmaBlockVector::CreateMinBlocks()
5954 {
5955  for(size_t i = 0; i < m_MinBlockCount; ++i)
5956  {
5957  VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5958  if(res != VK_SUCCESS)
5959  {
5960  return res;
5961  }
5962  }
5963  return VK_SUCCESS;
5964 }
5965 
5966 void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
5967 {
5968  pStats->size = 0;
5969  pStats->unusedSize = 0;
5970  pStats->allocationCount = 0;
5971  pStats->unusedRangeCount = 0;
5972  pStats->unusedRangeSizeMax = 0;
5973 
5974  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5975 
5976  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5977  {
5978  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
5979  VMA_ASSERT(pBlock);
5980  VMA_HEAVY_ASSERT(pBlock->Validate());
5981  pBlock->m_Metadata.AddPoolStats(*pStats);
5982  }
5983 }
5984 
5985 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
5986 
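// Allocation strategy, in order: (1) fit into an existing block without disturbing
// anything, (2) create a new block, (3) make other (lost-enabled) allocations lost
// to reclaim space, retrying up to VMA_ALLOCATION_TRY_COUNT times when concurrent
// threads keep touching the candidate allocations.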
5987 VkResult VmaBlockVector::Allocate(
5988  VmaPool hCurrentPool,
5989  uint32_t currentFrameIndex,
5990  const VkMemoryRequirements& vkMemReq,
5991  const VmaAllocationCreateInfo& createInfo,
5992  VmaSuballocationType suballocType,
5993  VmaAllocation* pAllocation)
5994 {
5995  const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
5996  const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
5997 
5998  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5999 
6000  // 1. Search existing allocations. Try to allocate without making other allocations lost.
6001  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
6002  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6003  {
6004  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
6005  VMA_ASSERT(pCurrBlock);
6006  VmaAllocationRequest currRequest = {};
6007  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6008  currentFrameIndex,
6009  m_FrameInUseCount,
6010  m_BufferImageGranularity,
6011  vkMemReq.size,
6012  vkMemReq.alignment,
6013  suballocType,
6014  false, // canMakeOtherLost
6015  &currRequest))
6016  {
6017  // Allocate from pCurrBlock.
6018  VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
6019 
6020  if(mapped)
6021  {
6022  VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6023  if(res != VK_SUCCESS)
6024  {
6025  return res;
6026  }
6027  }
6028 
6029  // We no longer have an empty block.
6030  if(pCurrBlock->m_Metadata.IsEmpty())
6031  {
6032  m_HasEmptyBlock = false;
6033  }
6034 
6035  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6036  pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6037  (*pAllocation)->InitBlockAllocation(
6038  hCurrentPool,
6039  pCurrBlock,
6040  currRequest.offset,
6041  vkMemReq.alignment,
6042  vkMemReq.size,
6043  suballocType,
6044  mapped,
6045  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
6046  VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6047  VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
6048  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
6049  return VK_SUCCESS;
6050  }
6051  }
6052 
6053  const bool canCreateNewBlock =
6054  ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
6055  (m_Blocks.size() < m_MaxBlockCount);
6056 
6057  // 2. Try to create new block.
6058  if(canCreateNewBlock)
6059  {
6060  // Calculate optimal size for new block.
6061  VkDeviceSize newBlockSize = m_PreferredBlockSize;
6062  uint32_t newBlockSizeShift = 0;
6063  const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
6064 
6065  // Allocating blocks of other sizes is allowed only in default pools.
6066  // In custom pools block size is fixed.
6067  if(m_IsCustomPool == false)
6068  {
6069  // Allocate 1/8, 1/4, 1/2 as first blocks.
6070  const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6071  for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6072  {
6073  const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6074  if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6075  {
6076  newBlockSize = smallerNewBlockSize;
6077  ++newBlockSizeShift;
6078  }
6079  else
6080  {
6081  break;
6082  }
6083  }
6084  }
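 // Example: with a 256 MiB preferred block size (the default
 // VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE - an assumption, the constant is
 // configurable), an empty default pool creates its first block at 32 MiB,
 // then grows through 64 and 128 MiB blocks before committing full 256 MiB ones.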
6085 
6086  size_t newBlockIndex = 0;
6087  VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
6088  // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
6089  if(m_IsCustomPool == false)
6090  {
6091  while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6092  {
6093  const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6094  if(smallerNewBlockSize >= vkMemReq.size)
6095  {
6096  newBlockSize = smallerNewBlockSize;
6097  ++newBlockSizeShift;
6098  res = CreateBlock(newBlockSize, &newBlockIndex);
6099  }
6100  else
6101  {
6102  break;
6103  }
6104  }
6105  }
6106 
6107  if(res == VK_SUCCESS)
6108  {
6109  VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
6110  VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6111 
6112  if(mapped)
6113  {
6114  res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6115  if(res != VK_SUCCESS)
6116  {
6117  return res;
6118  }
6119  }
6120 
6121  // Allocate from pBlock. Because it is empty, allocRequest can be trivially filled.
6122  VmaAllocationRequest allocRequest;
6123  pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6124  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6125  pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6126  (*pAllocation)->InitBlockAllocation(
6127  hCurrentPool,
6128  pBlock,
6129  allocRequest.offset,
6130  vkMemReq.alignment,
6131  vkMemReq.size,
6132  suballocType,
6133  mapped,
6134  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
6135  VMA_HEAVY_ASSERT(pBlock->Validate());
6136  VMA_DEBUG_LOG("  Created new block Size=%llu", newBlockSize);
6137  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
6138  return VK_SUCCESS;
6139  }
6140  }
6141 
6142  const bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
6143 
6144  // 3. Try to allocate from existing blocks with making other allocations lost.
6145  if(canMakeOtherLost)
6146  {
6147  uint32_t tryIndex = 0;
6148  for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6149  {
6150  VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6151  VmaAllocationRequest bestRequest = {};
6152  VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
6153 
6154  // 1. Search existing allocations.
6155  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
6156  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6157  {
6158  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
6159  VMA_ASSERT(pCurrBlock);
6160  VmaAllocationRequest currRequest = {};
6161  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6162  currentFrameIndex,
6163  m_FrameInUseCount,
6164  m_BufferImageGranularity,
6165  vkMemReq.size,
6166  vkMemReq.alignment,
6167  suballocType,
6168  canMakeOtherLost,
6169  &currRequest))
6170  {
6171  const VkDeviceSize currRequestCost = currRequest.CalcCost();
6172  if(pBestRequestBlock == VMA_NULL ||
6173  currRequestCost < bestRequestCost)
6174  {
6175  pBestRequestBlock = pCurrBlock;
6176  bestRequest = currRequest;
6177  bestRequestCost = currRequestCost;
6178 
6179  if(bestRequestCost == 0)
6180  {
6181  break;
6182  }
6183  }
6184  }
6185  }
6186 
6187  if(pBestRequestBlock != VMA_NULL)
6188  {
6189  if(mapped)
6190  {
6191  VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6192  if(res != VK_SUCCESS)
6193  {
6194  return res;
6195  }
6196  }
6197 
6198  if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6199  currentFrameIndex,
6200  m_FrameInUseCount,
6201  &bestRequest))
6202  {
6203  // We no longer have an empty block.
6204  if(pBestRequestBlock->m_Metadata.IsEmpty())
6205  {
6206  m_HasEmptyBlock = false;
6207  }
6208  // Allocate from this pBlock.
6209  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6210  pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6211  (*pAllocation)->InitBlockAllocation(
6212  hCurrentPool,
6213  pBestRequestBlock,
6214  bestRequest.offset,
6215  vkMemReq.alignment,
6216  vkMemReq.size,
6217  suballocType,
6218  mapped,
6219  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
6220  VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6221  VMA_DEBUG_LOG("  Returned from existing block");
6222  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
6223  return VK_SUCCESS;
6224  }
6225  // else: Some allocations must have been touched while we are here. Next try.
6226  }
6227  else
6228  {
6229  // Could not find place in any of the blocks - break outer loop.
6230  break;
6231  }
6232  }
6233  /* Maximum number of tries exceeded - a very unlikely event: many other
6234  threads were simultaneously touching the allocations, making it impossible to
6235  mark them as lost while we tried to allocate. */
6236  if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6237  {
6238  return VK_ERROR_TOO_MANY_OBJECTS;
6239  }
6240  }
6241 
6242  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6243 }
6244 
6245 void VmaBlockVector::Free(
6246  VmaAllocation hAllocation)
6247 {
6248  VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6249 
6250  // Scope for lock.
6251  {
6252  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6253 
6254  VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
6255 
6256  if(hAllocation->IsPersistentMap())
6257  {
6258  pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory, 1);
6259  }
6260 
6261  pBlock->m_Metadata.Free(hAllocation);
6262  VMA_HEAVY_ASSERT(pBlock->Validate());
6263 
6264  VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
6265 
6266  // pBlock became empty after this deallocation.
6267  if(pBlock->m_Metadata.IsEmpty())
6268  {
6269  // We already have an empty block - we don't want two, so delete this one.
6270  if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6271  {
6272  pBlockToDelete = pBlock;
6273  Remove(pBlock);
6274  }
6275  // We now have our first empty block.
6276  else
6277  {
6278  m_HasEmptyBlock = true;
6279  }
6280  }
6281  // pBlock didn't become empty, but we have another empty block - find and free that one.
6282  // (This is optional, heuristics.)
6283  else if(m_HasEmptyBlock)
6284  {
6285  VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6286  if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6287  {
6288  pBlockToDelete = pLastBlock;
6289  m_Blocks.pop_back();
6290  m_HasEmptyBlock = false;
6291  }
6292  }
6293 
6294  IncrementallySortBlocks();
6295  }
6296 
6297  // Destruction of a free block. Deferred until this point, outside of the mutex
6298  // lock, for performance reasons.
6299  if(pBlockToDelete != VMA_NULL)
6300  {
6301  VMA_DEBUG_LOG("  Deleted empty block");
6302  pBlockToDelete->Destroy(m_hAllocator);
6303  vma_delete(m_hAllocator, pBlockToDelete);
6304  }
6305 }
6306 
6307 size_t VmaBlockVector::CalcMaxBlockSize() const
6308 {
6309  size_t result = 0;
6310  for(size_t i = m_Blocks.size(); i--; )
6311  {
6312  result = VMA_MAX(result, static_cast<size_t>(m_Blocks[i]->m_Metadata.GetSize()));
6313  if(result >= m_PreferredBlockSize)
6314  {
6315  break;
6316  }
6317  }
6318  return result;
6319 }
6320 
6321 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6322 {
6323  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6324  {
6325  if(m_Blocks[blockIndex] == pBlock)
6326  {
6327  VmaVectorRemove(m_Blocks, blockIndex);
6328  return;
6329  }
6330  }
6331  VMA_ASSERT(0);
6332 }
6333 
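// Performs at most one adjacent swap per call, ordering blocks by ascending sum of
// free space. Called after every free, it keeps m_Blocks approximately sorted over
// time, so the allocation loop above prefers the fullest blocks first without ever
// paying for a full sort.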
6334 void VmaBlockVector::IncrementallySortBlocks()
6335 {
6336  // Bubble sort only until first swap.
6337  for(size_t i = 1; i < m_Blocks.size(); ++i)
6338  {
6339  if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6340  {
6341  VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
6342  return;
6343  }
6344  }
6345 }
6346 
6347 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
6348 {
6349  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6350  allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6351  allocInfo.allocationSize = blockSize;
6352  VkDeviceMemory mem = VK_NULL_HANDLE;
6353  VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6354  if(res < 0)
6355  {
6356  return res;
6357  }
6358 
6359  // New VkDeviceMemory successfully created.
6360 
6361  // Create new block object for it.
6362  VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6363  pBlock->Init(
6364  m_MemoryTypeIndex,
6365  mem,
6366  allocInfo.allocationSize);
6367 
6368  m_Blocks.push_back(pBlock);
6369  if(pNewBlockIndex != VMA_NULL)
6370  {
6371  *pNewBlockIndex = m_Blocks.size() - 1;
6372  }
6373 
6374  return VK_SUCCESS;
6375 }
6376 
6377 #if VMA_STATS_STRING_ENABLED
6378 
6379 void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
6380 {
6381  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6382 
6383  json.BeginObject();
6384 
6385  if(m_IsCustomPool)
6386  {
6387  json.WriteString("MemoryTypeIndex");
6388  json.WriteNumber(m_MemoryTypeIndex);
6389 
6390  json.WriteString("BlockSize");
6391  json.WriteNumber(m_PreferredBlockSize);
6392 
6393  json.WriteString("BlockCount");
6394  json.BeginObject(true);
6395  if(m_MinBlockCount > 0)
6396  {
6397  json.WriteString("Min");
6398  json.WriteNumber(m_MinBlockCount);
6399  }
6400  if(m_MaxBlockCount < SIZE_MAX)
6401  {
6402  json.WriteString("Max");
6403  json.WriteNumber(m_MaxBlockCount);
6404  }
6405  json.WriteString("Cur");
6406  json.WriteNumber(m_Blocks.size());
6407  json.EndObject();
6408 
6409  if(m_FrameInUseCount > 0)
6410  {
6411  json.WriteString("FrameInUseCount");
6412  json.WriteNumber(m_FrameInUseCount);
6413  }
6414  }
6415  else
6416  {
6417  json.WriteString("PreferredBlockSize");
6418  json.WriteNumber(m_PreferredBlockSize);
6419  }
6420 
6421  json.WriteString("Blocks");
6422  json.BeginArray();
6423  for(size_t i = 0; i < m_Blocks.size(); ++i)
6424  {
6425  m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
6426  }
6427  json.EndArray();
6428 
6429  json.EndObject();
6430 }
6431 
6432 #endif // #if VMA_STATS_STRING_ENABLED
6433 
6434 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6435  VmaAllocator hAllocator,
6436  uint32_t currentFrameIndex)
6437 {
6438  if(m_pDefragmentator == VMA_NULL)
6439  {
6440  m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6441  hAllocator,
6442  this,
6443  currentFrameIndex);
6444  }
6445 
6446  return m_pDefragmentator;
6447 }
6448 
6449 VkResult VmaBlockVector::Defragment(
6450  VmaDefragmentationStats* pDefragmentationStats,
6451  VkDeviceSize& maxBytesToMove,
6452  uint32_t& maxAllocationsToMove)
6453 {
6454  if(m_pDefragmentator == VMA_NULL)
6455  {
6456  return VK_SUCCESS;
6457  }
6458 
6459  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6460 
6461  // Defragment.
6462  VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
6463 
6464  // Accumulate statistics.
6465  if(pDefragmentationStats != VMA_NULL)
6466  {
6467  const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
6468  const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
6469  pDefragmentationStats->bytesMoved += bytesMoved;
6470  pDefragmentationStats->allocationsMoved += allocationsMoved;
6471  VMA_ASSERT(bytesMoved <= maxBytesToMove);
6472  VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
6473  maxBytesToMove -= bytesMoved;
6474  maxAllocationsToMove -= allocationsMoved;
6475  }
6476 
6477  // Free empty blocks.
6478  m_HasEmptyBlock = false;
6479  for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
6480  {
6481  VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6482  if(pBlock->m_Metadata.IsEmpty())
6483  {
6484  if(m_Blocks.size() > m_MinBlockCount)
6485  {
6486  if(pDefragmentationStats != VMA_NULL)
6487  {
6488  ++pDefragmentationStats->deviceMemoryBlocksFreed;
6489  pDefragmentationStats->bytesFreed += pBlock->m_Metadata.GetSize();
6490  }
6491 
6492  VmaVectorRemove(m_Blocks, blockIndex);
6493  pBlock->Destroy(m_hAllocator);
6494  vma_delete(m_hAllocator, pBlock);
6495  }
6496  else
6497  {
6498  m_HasEmptyBlock = true;
6499  }
6500  }
6501  }
6502 
6503  return result;
6504 }
6505 
6506 void VmaBlockVector::DestroyDefragmentator()
6507 {
6508  if(m_pDefragmentator != VMA_NULL)
6509  {
6510  vma_delete(m_hAllocator, m_pDefragmentator);
6511  m_pDefragmentator = VMA_NULL;
6512  }
6513 }
6514 
6515 void VmaBlockVector::MakePoolAllocationsLost(
6516  uint32_t currentFrameIndex,
6517  size_t* pLostAllocationCount)
6518 {
6519  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6520  size_t lostAllocationCount = 0;
6521  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6522  {
6523  VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
6524  VMA_ASSERT(pBlock);
6525  lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
6526  }
6527  if(pLostAllocationCount != VMA_NULL)
6528  {
6529  *pLostAllocationCount = lostAllocationCount;
6530  }
6531 }
6532 
6533 void VmaBlockVector::AddStats(VmaStats* pStats)
6534 {
6535  const uint32_t memTypeIndex = m_MemoryTypeIndex;
6536  const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6537 
6538  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6539 
6540  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6541  {
6542  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
6543  VMA_ASSERT(pBlock);
6544  VMA_HEAVY_ASSERT(pBlock->Validate());
6545  VmaStatInfo allocationStatInfo;
6546  pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6547  VmaAddStatInfo(pStats->total, allocationStatInfo);
6548  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
6549  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
6550  }
6551 }
6552 
6553 ////////////////////////////////////////////////////////////////////////////////
6554 // VmaDefragmentator members definition
6555 
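// The defragmentation algorithm, in outline: build a BlockInfo per block, sort
// blocks from most "destination" (low index) to most "source", sort each block's
// movable allocations from largest to smallest, then repeatedly try to move the
// smallest allocations out of the last blocks into free space of the first blocks,
// within the caller's byte and allocation budgets.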
6556 VmaDefragmentator::VmaDefragmentator(
6557  VmaAllocator hAllocator,
6558  VmaBlockVector* pBlockVector,
6559  uint32_t currentFrameIndex) :
6560  m_hAllocator(hAllocator),
6561  m_pBlockVector(pBlockVector),
6562  m_CurrentFrameIndex(currentFrameIndex),
6563  m_BytesMoved(0),
6564  m_AllocationsMoved(0),
6565  m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6566  m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6567 {
6568 }
6569 
6570 VmaDefragmentator::~VmaDefragmentator()
6571 {
6572  for(size_t i = m_Blocks.size(); i--; )
6573  {
6574  vma_delete(m_hAllocator, m_Blocks[i]);
6575  }
6576 }
6577 
6578 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6579 {
6580  AllocationInfo allocInfo;
6581  allocInfo.m_hAllocation = hAlloc;
6582  allocInfo.m_pChanged = pChanged;
6583  m_Allocations.push_back(allocInfo);
6584 }
6585 
6586 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator, void** ppMappedData)
6587 {
6588  // It has already been mapped for defragmentation.
6589  if(m_pMappedDataForDefragmentation)
6590  {
6591  *ppMappedData = m_pMappedDataForDefragmentation;
6592  return VK_SUCCESS;
6593  }
6594 
6595  // It is originally mapped.
6596  if(m_pBlock->m_Mapping.GetMappedData())
6597  {
6598  *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
6599  return VK_SUCCESS;
6600  }
6601 
6602  // Map on first usage.
6603  VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
6604  *ppMappedData = m_pMappedDataForDefragmentation;
6605  return res;
6606 }
6607 
6608 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6609 {
6610  if(m_pMappedDataForDefragmentation != VMA_NULL)
6611  {
6612  m_pBlock->Unmap(hAllocator, 1);
6613  }
6614 }
6615 
6616 VkResult VmaDefragmentator::DefragmentRound(
6617  VkDeviceSize maxBytesToMove,
6618  uint32_t maxAllocationsToMove)
6619 {
6620  if(m_Blocks.empty())
6621  {
6622  return VK_SUCCESS;
6623  }
6624 
6625  size_t srcBlockIndex = m_Blocks.size() - 1;
6626  size_t srcAllocIndex = SIZE_MAX;
6627  for(;;)
6628  {
6629  // 1. Find next allocation to move.
6630  // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
6631  // 1.2. Then start from last to first m_Allocations - they are sorted from largest to smallest.
6632  while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6633  {
6634  if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6635  {
6636  // Finished: no more allocations to process.
6637  if(srcBlockIndex == 0)
6638  {
6639  return VK_SUCCESS;
6640  }
6641  else
6642  {
6643  --srcBlockIndex;
6644  srcAllocIndex = SIZE_MAX;
6645  }
6646  }
6647  else
6648  {
6649  srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6650  }
6651  }
6652 
6653  BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6654  AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6655 
6656  const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6657  const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6658  const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6659  const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
6660 
6661  // 2. Try to find new place for this allocation in preceding or current block.
6662  for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6663  {
6664  BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6665  VmaAllocationRequest dstAllocRequest;
6666  if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6667  m_CurrentFrameIndex,
6668  m_pBlockVector->GetFrameInUseCount(),
6669  m_pBlockVector->GetBufferImageGranularity(),
6670  size,
6671  alignment,
6672  suballocType,
6673  false, // canMakeOtherLost
6674  &dstAllocRequest) &&
6675  MoveMakesSense(
6676  dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6677  {
6678  VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
6679 
6680  // Reached limit on number of allocations or bytes to move.
6681  if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6682  (m_BytesMoved + size > maxBytesToMove))
6683  {
6684  return VK_INCOMPLETE;
6685  }
6686 
6687  void* pDstMappedData = VMA_NULL;
6688  VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6689  if(res != VK_SUCCESS)
6690  {
6691  return res;
6692  }
6693 
6694  void* pSrcMappedData = VMA_NULL;
6695  res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6696  if(res != VK_SUCCESS)
6697  {
6698  return res;
6699  }
6700 
6701  // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
6702  memcpy(
6703  reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6704  reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6705  static_cast<size_t>(size));
6706 
6707  pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6708  pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
6709 
6710  allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6711 
6712  if(allocInfo.m_pChanged != VMA_NULL)
6713  {
6714  *allocInfo.m_pChanged = VK_TRUE;
6715  }
6716 
6717  ++m_AllocationsMoved;
6718  m_BytesMoved += size;
6719 
6720  VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
6721 
6722  break;
6723  }
6724  }
6725 
6726  // If not processed, this allocInfo remains in pSrcBlockInfo->m_Allocations for the next round.
6727 
6728  if(srcAllocIndex > 0)
6729  {
6730  --srcAllocIndex;
6731  }
6732  else
6733  {
6734  if(srcBlockIndex > 0)
6735  {
6736  --srcBlockIndex;
6737  srcAllocIndex = SIZE_MAX;
6738  }
6739  else
6740  {
6741  return VK_SUCCESS;
6742  }
6743  }
6744  }
6745 }
6746 
6747 VkResult VmaDefragmentator::Defragment(
6748  VkDeviceSize maxBytesToMove,
6749  uint32_t maxAllocationsToMove)
6750 {
6751  if(m_Allocations.empty())
6752  {
6753  return VK_SUCCESS;
6754  }
6755 
6756  // Create block info for each block.
6757  const size_t blockCount = m_pBlockVector->m_Blocks.size();
6758  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6759  {
6760  BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6761  pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6762  m_Blocks.push_back(pBlockInfo);
6763  }
6764 
6765  // Sort them by m_pBlock pointer value.
6766  VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6767 
6768  // Move allocation infos from m_Allocations to the m_Allocations of the appropriate BlockInfo in m_Blocks.
6769  for(size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6770  {
6771  AllocationInfo& allocInfo = m_Allocations[blockIndex];
6772  // Now that we are inside VmaBlockVector::m_Mutex, we can make a final check whether this allocation was not lost in the meantime.
6773  if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6774  {
6775  VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6776  BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6777  if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6778  {
6779  (*it)->m_Allocations.push_back(allocInfo);
6780  }
6781  else
6782  {
6783  VMA_ASSERT(0);
6784  }
6785  }
6786  }
6787  m_Allocations.clear();
6788 
6789  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6790  {
6791  BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6792  pBlockInfo->CalcHasNonMovableAllocations();
6793  pBlockInfo->SortAllocationsBySizeDescecnding();
6794  }
6795 
6796  // Sort m_Blocks this time by the main criterion, from most "destination" to most "source" blocks.
6797  VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
6798 
6799  // Execute defragmentation rounds (the main part).
6800  VkResult result = VK_SUCCESS;
6801  for(size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6802  {
6803  result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
6804  }
6805 
6806  // Unmap blocks that were mapped for defragmentation.
6807  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6808  {
6809  m_Blocks[blockIndex]->Unmap(m_hAllocator);
6810  }
6811 
6812  return result;
6813 }
6814 
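// A move "makes sense" only if it packs data toward the front: either into an
// earlier block, or to a lower offset within the same block. For example, moving
// from block 2 to block 0 is accepted, and so is moving within block 1 from offset
// 4096 to offset 0; any move backward is rejected, which prevents the rounds from
// oscillating.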
6815 bool VmaDefragmentator::MoveMakesSense(
6816  size_t dstBlockIndex, VkDeviceSize dstOffset,
6817  size_t srcBlockIndex, VkDeviceSize srcOffset)
6818 {
6819  if(dstBlockIndex < srcBlockIndex)
6820  {
6821  return true;
6822  }
6823  if(dstBlockIndex > srcBlockIndex)
6824  {
6825  return false;
6826  }
6827  if(dstOffset < srcOffset)
6828  {
6829  return true;
6830  }
6831  return false;
6832 }
6833 
6834 ////////////////////////////////////////////////////////////////////////////////
6835 // VmaAllocator_T
6836 
6837 VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
6838  m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
6839  m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
6840  m_hDevice(pCreateInfo->device),
6841  m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6842  m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6843  *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6844  m_PreferredLargeHeapBlockSize(0),
6845  m_PhysicalDevice(pCreateInfo->physicalDevice),
6846  m_CurrentFrameIndex(0),
6847  m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
6848 {
6849  VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);
6850 
6851  memset(&m_DeviceMemoryCallbacks, 0 ,sizeof(m_DeviceMemoryCallbacks));
6852  memset(&m_MemProps, 0, sizeof(m_MemProps));
6853  memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
6854 
6855  memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
6856  memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
6857 
6858  for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6859  {
6860  m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6861  }
6862 
6863  if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
6864  {
6865  m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
6866  m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
6867  }
6868 
6869  ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);
6870 
6871  (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6872  (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
6873 
6874  m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
6875  pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
6876 
6877  if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
6878  {
6879  for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6880  {
6881  const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
6882  if(limit != VK_WHOLE_SIZE)
6883  {
6884  m_HeapSizeLimit[heapIndex] = limit;
6885  if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6886  {
6887  m_MemProps.memoryHeaps[heapIndex].size = limit;
6888  }
6889  }
6890  }
6891  }
6892 
6893  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6894  {
6895  const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6896 
6897  m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
6898  this,
6899  memTypeIndex,
6900  preferredBlockSize,
6901  0,
6902  SIZE_MAX,
6903  GetBufferImageGranularity(),
6904  pCreateInfo->frameInUseCount,
6905  false); // isCustomPool
6906  // No need to call m_pBlockVectors[memTypeIndex]->CreateMinBlocks here,
6907  // because minBlockCount is 0.
6908  m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6909  }
6910 }
6911 
6912 VmaAllocator_T::~VmaAllocator_T()
6913 {
6914  VMA_ASSERT(m_Pools.empty());
6915 
6916  for(size_t i = GetMemoryTypeCount(); i--; )
6917  {
6918  vma_delete(this, m_pDedicatedAllocations[i]);
6919  vma_delete(this, m_pBlockVectors[i]);
6920  }
6921 }
6922 
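// Imports the Vulkan function pointers the allocator will call. With
// VMA_STATIC_VULKAN_FUNCTIONS == 1 they come from the statically linked loader;
// otherwise the user must supply them. A minimal sketch of the latter (illustrative
// only - any loader works as long as the pointers are valid):
//
//   VmaVulkanFunctions vulkanFunctions = {};
//   vulkanFunctions.vkAllocateMemory = vkAllocateMemory;
//   vulkanFunctions.vkFreeMemory = vkFreeMemory;
//   // ... fill in the remaining members of VmaVulkanFunctions ...
//   allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions;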
6923 void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
6924 {
6925 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
6926  m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6927  m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6928  m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6929  m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6930  m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6931  m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6932  m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6933  m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6934  m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6935  m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6936  m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6937  m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6938  m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6939  m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
6940  if(m_UseKhrDedicatedAllocation)
6941  {
6942  m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
6943  (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
6944  m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
6945  (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
6946  }
6947 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
6948 
6949 #define VMA_COPY_IF_NOT_NULL(funcName) \
6950  if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
6951 
6952  if(pVulkanFunctions != VMA_NULL)
6953  {
6954  VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
6955  VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
6956  VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
6957  VMA_COPY_IF_NOT_NULL(vkFreeMemory);
6958  VMA_COPY_IF_NOT_NULL(vkMapMemory);
6959  VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
6960  VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
6961  VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
6962  VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
6963  VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
6964  VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
6965  VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
6966  VMA_COPY_IF_NOT_NULL(vkCreateImage);
6967  VMA_COPY_IF_NOT_NULL(vkDestroyImage);
6968  VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
6969  VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
6970  }
6971 
6972 #undef VMA_COPY_IF_NOT_NULL
6973 
6974  // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
6975  // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
6976  VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6977  VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6978  VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6979  VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6980  VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6981  VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6982  VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6983  VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6984  VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6985  VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6986  VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6987  VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6988  VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6989  VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6990  if(m_UseKhrDedicatedAllocation)
6991  {
6992  VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
6993  VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
6994  }
6995 }
6996 
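// Worked example: a heap no larger than VMA_SMALL_HEAP_MAX_SIZE counts as "small"
// and gets blocks of heapSize / 8 - e.g. 32 MiB blocks for a 256 MiB heap, assuming
// the default threshold; larger heaps use m_PreferredLargeHeapBlockSize as-is.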
6997 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6998 {
6999  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7000  const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7001  const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7002  return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
7003 }
7004 
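// Decides between a dedicated VkDeviceMemory and a suballocation from the block
// vector of the given memory type. Dedicated memory is preferred when explicitly
// requested, when VMA_DEBUG_ALWAYS_DEDICATED_MEMORY is enabled, or when the request
// exceeds half the preferred block size; a block allocation that fails still falls
// back to dedicated memory unless NEVER_ALLOCATE is set.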
7005 VkResult VmaAllocator_T::AllocateMemoryOfType(
7006  const VkMemoryRequirements& vkMemReq,
7007  bool dedicatedAllocation,
7008  VkBuffer dedicatedBuffer,
7009  VkImage dedicatedImage,
7010  const VmaAllocationCreateInfo& createInfo,
7011  uint32_t memTypeIndex,
7012  VmaSuballocationType suballocType,
7013  VmaAllocation* pAllocation)
7014 {
7015  VMA_ASSERT(pAllocation != VMA_NULL);
7016  VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
7017 
7018  VmaAllocationCreateInfo finalCreateInfo = createInfo;
7019 
7020  // If memory type is not HOST_VISIBLE, disable MAPPED.
7021  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
7022  (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7023  {
7024  finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
7025  }
7026 
7027  VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
7028  VMA_ASSERT(blockVector);
7029 
7030  const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7031  bool preferDedicatedMemory =
7032  VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7033  dedicatedAllocation ||
7034  // Heuristics: Allocate dedicated memory if requested size is greater than half of preferred block size.
7035  vkMemReq.size > preferredBlockSize / 2;
7036 
7037  if(preferDedicatedMemory &&
7038  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
7039  finalCreateInfo.pool == VK_NULL_HANDLE)
7040  {
7041  finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
7042  }
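/*
Illustration of the heuristic above, assuming the preferred block size for this
memory type worked out to 256 MiB: any request larger than 128 MiB is promoted
to a dedicated VkDeviceMemory allocation, unless NEVER_ALLOCATE forbids new
device allocations or the request targets a custom pool.
*/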
7043 
7044  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
7045  {
7046  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
7047  {
7048  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7049  }
7050  else
7051  {
7052  return AllocateDedicatedMemory(
7053  vkMemReq.size,
7054  suballocType,
7055  memTypeIndex,
7056  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
7057  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
7058  finalCreateInfo.pUserData,
7059  dedicatedBuffer,
7060  dedicatedImage,
7061  pAllocation);
7062  }
7063  }
7064  else
7065  {
7066  VkResult res = blockVector->Allocate(
7067  VK_NULL_HANDLE, // hCurrentPool
7068  m_CurrentFrameIndex.load(),
7069  vkMemReq,
7070  finalCreateInfo,
7071  suballocType,
7072  pAllocation);
7073  if(res == VK_SUCCESS)
7074  {
7075  return res;
7076  }
7077 
7078  // Block vector allocation failed. Try dedicated memory.
7079  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
7080  {
7081  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7082  }
7083  else
7084  {
7085  res = AllocateDedicatedMemory(
7086  vkMemReq.size,
7087  suballocType,
7088  memTypeIndex,
7089  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
7090  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
7091  finalCreateInfo.pUserData,
7092  dedicatedBuffer,
7093  dedicatedImage,
7094  pAllocation);
7095  if(res == VK_SUCCESS)
7096  {
7097  // Succeeded: AllocateDedicatedMemory function already filled pAllocation, nothing more to do here.
7098  VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
7099  return VK_SUCCESS;
7100  }
7101  else
7102  {
7103  // Everything failed: Return error code.
7104  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
7105  return res;
7106  }
7107  }
7108  }
7109 }
7110 
7111 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7112  VkDeviceSize size,
7113  VmaSuballocationType suballocType,
7114  uint32_t memTypeIndex,
7115  bool map,
7116  bool isUserDataString,
7117  void* pUserData,
7118  VkBuffer dedicatedBuffer,
7119  VkImage dedicatedImage,
7120  VmaAllocation* pAllocation)
7121 {
7122  VMA_ASSERT(pAllocation);
7123 
7124  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7125  allocInfo.memoryTypeIndex = memTypeIndex;
7126  allocInfo.allocationSize = size;
7127 
7128  VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7129  if(m_UseKhrDedicatedAllocation)
7130  {
7131  if(dedicatedBuffer != VK_NULL_HANDLE)
7132  {
7133  VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7134  dedicatedAllocInfo.buffer = dedicatedBuffer;
7135  allocInfo.pNext = &dedicatedAllocInfo;
7136  }
7137  else if(dedicatedImage != VK_NULL_HANDLE)
7138  {
7139  dedicatedAllocInfo.image = dedicatedImage;
7140  allocInfo.pNext = &dedicatedAllocInfo;
7141  }
7142  }
7143 
7144  // Allocate VkDeviceMemory.
7145  VkDeviceMemory hMemory = VK_NULL_HANDLE;
7146  VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7147  if(res < 0)
7148  {
7149  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
7150  return res;
7151  }
7152 
7153  void* pMappedData = VMA_NULL;
7154  if(map)
7155  {
7156  res = (*m_VulkanFunctions.vkMapMemory)(
7157  m_hDevice,
7158  hMemory,
7159  0,
7160  VK_WHOLE_SIZE,
7161  0,
7162  &pMappedData);
7163  if(res < 0)
7164  {
7165  VMA_DEBUG_LOG(" vkMapMemory FAILED");
7166  FreeVulkanMemory(memTypeIndex, size, hMemory);
7167  return res;
7168  }
7169  }
7170 
7171  *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7172  (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7173  (*pAllocation)->SetUserData(this, pUserData);
7174 
7175  // Register it in m_pDedicatedAllocations.
7176  {
7177  VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7178  AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7179  VMA_ASSERT(pDedicatedAllocations);
7180  VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7181  }
7182 
7183  VMA_DEBUG_LOG(" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
7184 
7185  return VK_SUCCESS;
7186 }
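/*
Callers reach this path deterministically via the DEDICATED_MEMORY flag. A
minimal sketch, assuming valid `allocator` and `bufCreateInfo`:

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

    VkBuffer buf;
    VmaAllocation alloc;
    vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
*/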
7187 
7188 void VmaAllocator_T::GetBufferMemoryRequirements(
7189  VkBuffer hBuffer,
7190  VkMemoryRequirements& memReq,
7191  bool& requiresDedicatedAllocation,
7192  bool& prefersDedicatedAllocation) const
7193 {
7194  if(m_UseKhrDedicatedAllocation)
7195  {
7196  VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7197  memReqInfo.buffer = hBuffer;
7198 
7199  VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7200 
7201  VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7202  memReq2.pNext = &memDedicatedReq;
7203 
7204  (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7205 
7206  memReq = memReq2.memoryRequirements;
7207  requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7208  prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7209  }
7210  else
7211  {
7212  (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7213  requiresDedicatedAllocation = false;
7214  prefersDedicatedAllocation = false;
7215  }
7216 }
7217 
7218 void VmaAllocator_T::GetImageMemoryRequirements(
7219  VkImage hImage,
7220  VkMemoryRequirements& memReq,
7221  bool& requiresDedicatedAllocation,
7222  bool& prefersDedicatedAllocation) const
7223 {
7224  if(m_UseKhrDedicatedAllocation)
7225  {
7226  VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7227  memReqInfo.image = hImage;
7228 
7229  VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7230 
7231  VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7232  memReq2.pNext = &memDedicatedReq;
7233 
7234  (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7235 
7236  memReq = memReq2.memoryRequirements;
7237  requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7238  prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7239  }
7240  else
7241  {
7242  (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7243  requiresDedicatedAllocation = false;
7244  prefersDedicatedAllocation = false;
7245  }
7246 }
7247 
7248 VkResult VmaAllocator_T::AllocateMemory(
7249  const VkMemoryRequirements& vkMemReq,
7250  bool requiresDedicatedAllocation,
7251  bool prefersDedicatedAllocation,
7252  VkBuffer dedicatedBuffer,
7253  VkImage dedicatedImage,
7254  const VmaAllocationCreateInfo& createInfo,
7255  VmaSuballocationType suballocType,
7256  VmaAllocation* pAllocation)
7257 {
7258  if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
7259  (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
7260  {
7261  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7262  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7263  }
7264  if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
7265  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
7266  {
7267  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7268  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7269  }
7270  if(requiresDedicatedAllocation)
7271  {
7272  if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
7273  {
7274  VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7275  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7276  }
7277  if(createInfo.pool != VK_NULL_HANDLE)
7278  {
7279  VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
7280  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7281  }
7282  }
7283  if((createInfo.pool != VK_NULL_HANDLE) &&
7284  ((createInfo.flags & (VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT)) != 0))
7285  {
7286  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7287  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7288  }
7289 
7290  if(createInfo.pool != VK_NULL_HANDLE)
7291  {
7292  return createInfo.pool->m_BlockVector.Allocate(
7293  createInfo.pool,
7294  m_CurrentFrameIndex.load(),
7295  vkMemReq,
7296  createInfo,
7297  suballocType,
7298  pAllocation);
7299  }
7300  else
7301  {
7302  // Bit mask of Vulkan memory types acceptable for this allocation.
7303  uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7304  uint32_t memTypeIndex = UINT32_MAX;
7305  VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
7306  if(res == VK_SUCCESS)
7307  {
7308  res = AllocateMemoryOfType(
7309  vkMemReq,
7310  requiresDedicatedAllocation || prefersDedicatedAllocation,
7311  dedicatedBuffer,
7312  dedicatedImage,
7313  createInfo,
7314  memTypeIndex,
7315  suballocType,
7316  pAllocation);
7317  // Succeeded on first try.
7318  if(res == VK_SUCCESS)
7319  {
7320  return res;
7321  }
7322  // Allocation from this memory type failed. Try other compatible memory types.
7323  else
7324  {
7325  for(;;)
7326  {
7327  // Remove old memTypeIndex from list of possibilities.
7328  memoryTypeBits &= ~(1u << memTypeIndex);
7329  // Find alternative memTypeIndex.
7330  res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
7331  if(res == VK_SUCCESS)
7332  {
7333  res = AllocateMemoryOfType(
7334  vkMemReq,
7335  requiresDedicatedAllocation || prefersDedicatedAllocation,
7336  dedicatedBuffer,
7337  dedicatedImage,
7338  createInfo,
7339  memTypeIndex,
7340  suballocType,
7341  pAllocation);
7342  // Allocation from this alternative memory type succeeded.
7343  if(res == VK_SUCCESS)
7344  {
7345  return res;
7346  }
7347  // else: Allocation from this memory type failed. Try next one - next loop iteration.
7348  }
7349  // No other matching memory type index could be found.
7350  else
7351  {
7352  // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
7353  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7354  }
7355  }
7356  }
7357  }
7358  // Can't find any single memory type matching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT.
7359  else
7360  return res;
7361  }
7362 }
7363 
7364 void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
7365 {
7366  VMA_ASSERT(allocation);
7367 
7368  if(allocation->CanBecomeLost() == false ||
7369  allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7370  {
7371  switch(allocation->GetType())
7372  {
7373  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7374  {
7375  VmaBlockVector* pBlockVector = VMA_NULL;
7376  VmaPool hPool = allocation->GetPool();
7377  if(hPool != VK_NULL_HANDLE)
7378  {
7379  pBlockVector = &hPool->m_BlockVector;
7380  }
7381  else
7382  {
7383  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7384  pBlockVector = m_pBlockVectors[memTypeIndex];
7385  }
7386  pBlockVector->Free(allocation);
7387  }
7388  break;
7389  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7390  FreeDedicatedMemory(allocation);
7391  break;
7392  default:
7393  VMA_ASSERT(0);
7394  }
7395  }
7396 
7397  allocation->SetUserData(this, VMA_NULL);
7398  vma_delete(this, allocation);
7399 }
7400 
7401 void VmaAllocator_T::CalculateStats(VmaStats* pStats)
7402 {
7403  // Initialize.
7404  InitStatInfo(pStats->total);
7405  for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7406  InitStatInfo(pStats->memoryType[i]);
7407  for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7408  InitStatInfo(pStats->memoryHeap[i]);
7409 
7410  // Process default pools.
7411  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7412  {
7413  VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
7414  VMA_ASSERT(pBlockVector);
7415  pBlockVector->AddStats(pStats);
7416  }
7417 
7418  // Process custom pools.
7419  {
7420  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7421  for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7422  {
7423  m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
7424  }
7425  }
7426 
7427  // Process dedicated allocations.
7428  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7429  {
7430  const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7431  VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7432  AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7433  VMA_ASSERT(pDedicatedAllocVector);
7434  for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7435  {
7436  VmaStatInfo allocationStatInfo;
7437  (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7438  VmaAddStatInfo(pStats->total, allocationStatInfo);
7439  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
7440  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
7441  }
7442  }
7443 
7444  // Postprocess.
7445  VmaPostprocessCalcStatInfo(pStats->total);
7446  for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
7447  VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
7448  for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
7449  VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
7450 }
7451 
7452 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
7453 
7454 VkResult VmaAllocator_T::Defragment(
7455  VmaAllocation* pAllocations,
7456  size_t allocationCount,
7457  VkBool32* pAllocationsChanged,
7458  const VmaDefragmentationInfo* pDefragmentationInfo,
7459  VmaDefragmentationStats* pDefragmentationStats)
7460 {
7461  if(pAllocationsChanged != VMA_NULL)
7462  {
7463  memset(pAllocationsChanged, 0, allocationCount * sizeof(*pAllocationsChanged));
7464  }
7465  if(pDefragmentationStats != VMA_NULL)
7466  {
7467  memset(pDefragmentationStats, 0, sizeof(*pDefragmentationStats));
7468  }
7469 
7470  const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7471 
7472  VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7473 
7474  const size_t poolCount = m_Pools.size();
7475 
7476  // Dispatch pAllocations among defragmentators. Create them in BlockVectors when necessary.
7477  for(size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7478  {
7479  VmaAllocation hAlloc = pAllocations[allocIndex];
7480  VMA_ASSERT(hAlloc);
7481  const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
7482  // DedicatedAlloc cannot be defragmented.
7483  if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7484  // Only HOST_VISIBLE memory types can be defragmented.
7485  ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7486  // Lost allocation cannot be defragmented.
7487  (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7488  {
7489  VmaBlockVector* pAllocBlockVector = VMA_NULL;
7490 
7491  const VmaPool hAllocPool = hAlloc->GetPool();
7492  // This allocation belongs to custom pool.
7493  if(hAllocPool != VK_NULL_HANDLE)
7494  {
7495  pAllocBlockVector = &hAllocPool->GetBlockVector();
7496  }
7497  // This allocation belongs to general pool.
7498  else
7499  {
7500  pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7501  }
7502 
7503  VmaDefragmentator* const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);
7504 
7505  VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
7506  &pAllocationsChanged[allocIndex] : VMA_NULL;
7507  pDefragmentator->AddAllocation(hAlloc, pChanged);
7508  }
7509  }
7510 
7511  VkResult result = VK_SUCCESS;
7512 
7513  // ======== Main processing.
7514 
7515  VkDeviceSize maxBytesToMove = SIZE_MAX;
7516  uint32_t maxAllocationsToMove = UINT32_MAX;
7517  if(pDefragmentationInfo != VMA_NULL)
7518  {
7519  maxBytesToMove = pDefragmentationInfo->maxBytesToMove;
7520  maxAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
7521  }
7522 
7523  // Process standard memory.
7524  for(uint32_t memTypeIndex = 0;
7525  (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7526  ++memTypeIndex)
7527  {
7528  // Only HOST_VISIBLE memory types can be defragmented.
7529  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7530  {
7531  result = m_pBlockVectors[memTypeIndex]->Defragment(
7532  pDefragmentationStats,
7533  maxBytesToMove,
7534  maxAllocationsToMove);
7535  }
7536  }
7537 
7538  // Process custom pools.
7539  for(size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7540  {
7541  result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7542  pDefragmentationStats,
7543  maxBytesToMove,
7544  maxAllocationsToMove);
7545  }
7546 
7547  // ======== Destroy defragmentators.
7548 
7549  // Process custom pools.
7550  for(size_t poolIndex = poolCount; poolIndex--; )
7551  {
7552  m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7553  }
7554 
7555  // Process standard memory.
7556  for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7557  {
7558  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7559  {
7560  m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
7561  }
7562  }
7563 
7564  return result;
7565 }
7566 
7567 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
7568 {
7569  if(hAllocation->CanBecomeLost())
7570  {
7571  /*
7572  Warning: This is a carefully designed algorithm.
7573  Do not modify unless you really know what you're doing :)
7574  */
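 // The loop below is lock-free: each pass either observes the allocation as
 // lost, observes it as already touched in the current frame, or atomically
 // advances its last-use frame index with compare-exchange and retries until
 // one of the first two outcomes holds.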
7575  uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7576  uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7577  for(;;)
7578  {
7579  if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7580  {
7581  pAllocationInfo->memoryType = UINT32_MAX;
7582  pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
7583  pAllocationInfo->offset = 0;
7584  pAllocationInfo->size = hAllocation->GetSize();
7585  pAllocationInfo->pMappedData = VMA_NULL;
7586  pAllocationInfo->pUserData = hAllocation->GetUserData();
7587  return;
7588  }
7589  else if(localLastUseFrameIndex == localCurrFrameIndex)
7590  {
7591  pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
7592  pAllocationInfo->deviceMemory = hAllocation->GetMemory();
7593  pAllocationInfo->offset = hAllocation->GetOffset();
7594  pAllocationInfo->size = hAllocation->GetSize();
7595  pAllocationInfo->pMappedData = VMA_NULL;
7596  pAllocationInfo->pUserData = hAllocation->GetUserData();
7597  return;
7598  }
7599  else // Last use time earlier than current time.
7600  {
7601  if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7602  {
7603  localLastUseFrameIndex = localCurrFrameIndex;
7604  }
7605  }
7606  }
7607  }
7608  else
7609  {
7610  pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
7611  pAllocationInfo->deviceMemory = hAllocation->GetMemory();
7612  pAllocationInfo->offset = hAllocation->GetOffset();
7613  pAllocationInfo->size = hAllocation->GetSize();
7614  pAllocationInfo->pMappedData = hAllocation->GetMappedData();
7615  pAllocationInfo->pUserData = hAllocation->GetUserData();
7616  }
7617 }
7618 
7619 VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7620 {
7621  VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u", pCreateInfo->memoryTypeIndex);
7622 
7623  VmaPoolCreateInfo newCreateInfo = *pCreateInfo;
7624 
7625  if(newCreateInfo.maxBlockCount == 0)
7626  {
7627  newCreateInfo.maxBlockCount = SIZE_MAX;
7628  }
7629  if(newCreateInfo.blockSize == 0)
7630  {
7631  newCreateInfo.blockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
7632  }
7633 
7634  *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo);
7635 
7636  VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7637  if(res != VK_SUCCESS)
7638  {
7639  vma_delete(this, *pPool);
7640  *pPool = VMA_NULL;
7641  return res;
7642  }
7643 
7644  // Add to m_Pools.
7645  {
7646  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7647  VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7648  }
7649 
7650  return VK_SUCCESS;
7651 }
7652 
7653 void VmaAllocator_T::DestroyPool(VmaPool pool)
7654 {
7655  // Remove from m_Pools.
7656  {
7657  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7658  bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7659  VMA_ASSERT(success && "Pool not found in Allocator.");
7660  }
7661 
7662  vma_delete(this, pool);
7663 }
7664 
7665 void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
7666 {
7667  pool->m_BlockVector.GetPoolStats(pPoolStats);
7668 }
7669 
7670 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7671 {
7672  m_CurrentFrameIndex.store(frameIndex);
7673 }
7674 
7675 void VmaAllocator_T::MakePoolAllocationsLost(
7676  VmaPool hPool,
7677  size_t* pLostAllocationCount)
7678 {
7679  hPool->m_BlockVector.MakePoolAllocationsLost(
7680  m_CurrentFrameIndex.load(),
7681  pLostAllocationCount);
7682 }
7683 
7684 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7685 {
7686  *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
7687  (*pAllocation)->InitLost();
7688 }
7689 
7690 VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7691 {
7692  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
7693 
7694  VkResult res;
7695  if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7696  {
7697  VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7698  if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7699  {
7700  res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7701  if(res == VK_SUCCESS)
7702  {
7703  m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
7704  }
7705  }
7706  else
7707  {
7708  res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
7709  }
7710  }
7711  else
7712  {
7713  res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7714  }
7715 
7716  if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
7717  {
7718  (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7719  }
7720 
7721  return res;
7722 }
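/*
Worked example of the heap budget above: with
VmaAllocatorCreateInfo::pHeapSizeLimit capping a heap at 1 GiB and 900 MiB
already allocated, the remaining budget is 124 MiB, so a 200 MiB request fails
with VK_ERROR_OUT_OF_DEVICE_MEMORY before vkAllocateMemory is even called,
while a 100 MiB request proceeds and leaves 24 MiB of budget.
*/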
7723 
7724 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7725 {
7726  if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
7727  {
7728  (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
7729  }
7730 
7731  (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7732 
7733  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7734  if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7735  {
7736  VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7737  m_HeapSizeLimit[heapIndex] += size;
7738  }
7739 }
7740 
7741 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
7742 {
7743  if(hAllocation->CanBecomeLost())
7744  {
7745  return VK_ERROR_MEMORY_MAP_FAILED;
7746  }
7747 
7748  switch(hAllocation->GetType())
7749  {
7750  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7751  {
7752  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
7753  char *pBytes = VMA_NULL;
7754  VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
7755  if(res == VK_SUCCESS)
7756  {
7757  *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
7758  hAllocation->BlockAllocMap();
7759  }
7760  return res;
7761  }
7762  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7763  return hAllocation->DedicatedAllocMap(this, ppData);
7764  default:
7765  VMA_ASSERT(0);
7766  return VK_ERROR_MEMORY_MAP_FAILED;
7767  }
7768 }
7769 
7770 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
7771 {
7772  switch(hAllocation->GetType())
7773  {
7774  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7775  {
7776  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
7777  hAllocation->BlockAllocUnmap();
7778  pBlock->Unmap(this, 1);
7779  }
7780  break;
7781  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7782  hAllocation->DedicatedAllocUnmap(this);
7783  break;
7784  default:
7785  VMA_ASSERT(0);
7786  }
7787 }
7788 
7789 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7790 {
7791  VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7792 
7793  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7794  {
7795  VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7796  AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7797  VMA_ASSERT(pDedicatedAllocations);
7798  bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7799  VMA_ASSERT(success);
7800  }
7801 
7802  VkDeviceMemory hMemory = allocation->GetMemory();
7803 
7804  if(allocation->GetMappedData() != VMA_NULL)
7805  {
7806  (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7807  }
7808 
7809  FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7810 
7811  VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
7812 }
7813 
7814 #if VMA_STATS_STRING_ENABLED
7815 
7816 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7817 {
7818  bool dedicatedAllocationsStarted = false;
7819  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7820  {
7821  VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7822  AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7823  VMA_ASSERT(pDedicatedAllocVector);
7824  if(pDedicatedAllocVector->empty() == false)
7825  {
7826  if(dedicatedAllocationsStarted == false)
7827  {
7828  dedicatedAllocationsStarted = true;
7829  json.WriteString("DedicatedAllocations");
7830  json.BeginObject();
7831  }
7832 
7833  json.BeginString("Type ");
7834  json.ContinueString(memTypeIndex);
7835  json.EndString();
7836 
7837  json.BeginArray();
7838 
7839  for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7840  {
7841  const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7842  json.BeginObject(true);
7843 
7844  json.WriteString("Type");
7845  json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7846 
7847  json.WriteString("Size");
7848  json.WriteNumber(hAlloc->GetSize());
7849 
7850  const void* pUserData = hAlloc->GetUserData();
7851  if(pUserData != VMA_NULL)
7852  {
7853  json.WriteString("UserData");
7854  if(hAlloc->IsUserDataString())
7855  {
7856  json.WriteString((const char*)pUserData);
7857  }
7858  else
7859  {
7860  json.BeginString();
7861  json.ContinueString_Pointer(pUserData);
7862  json.EndString();
7863  }
7864  }
7865 
7866  json.EndObject();
7867  }
7868 
7869  json.EndArray();
7870  }
7871  }
7872  if(dedicatedAllocationsStarted)
7873  {
7874  json.EndObject();
7875  }
7876 
7877  {
7878  bool allocationsStarted = false;
7879  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7880  {
7881  if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
7882  {
7883  if(allocationsStarted == false)
7884  {
7885  allocationsStarted = true;
7886  json.WriteString("DefaultPools");
7887  json.BeginObject();
7888  }
7889 
7890  json.BeginString("Type ");
7891  json.ContinueString(memTypeIndex);
7892  json.EndString();
7893 
7894  m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
7895  }
7896  }
7897  if(allocationsStarted)
7898  {
7899  json.EndObject();
7900  }
7901  }
7902 
7903  {
7904  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7905  const size_t poolCount = m_Pools.size();
7906  if(poolCount > 0)
7907  {
7908  json.WriteString("Pools");
7909  json.BeginArray();
7910  for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7911  {
7912  m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7913  }
7914  json.EndArray();
7915  }
7916  }
7917 }
7918 
7919 #endif // #if VMA_STATS_STRING_ENABLED
7920 
7921 static VkResult AllocateMemoryForImage(
7922  VmaAllocator allocator,
7923  VkImage image,
7924  const VmaAllocationCreateInfo* pAllocationCreateInfo,
7925  VmaSuballocationType suballocType,
7926  VmaAllocation* pAllocation)
7927 {
7928  VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7929 
7930  VkMemoryRequirements vkMemReq = {};
7931  bool requiresDedicatedAllocation = false;
7932  bool prefersDedicatedAllocation = false;
7933  allocator->GetImageMemoryRequirements(image, vkMemReq,
7934  requiresDedicatedAllocation, prefersDedicatedAllocation);
7935 
7936  return allocator->AllocateMemory(
7937  vkMemReq,
7938  requiresDedicatedAllocation,
7939  prefersDedicatedAllocation,
7940  VK_NULL_HANDLE, // dedicatedBuffer
7941  image, // dedicatedImage
7942  *pAllocationCreateInfo,
7943  suballocType,
7944  pAllocation);
7945 }
7946 
7947 ////////////////////////////////////////////////////////////////////////////////
7948 // Public interface
7949 
7950 VkResult vmaCreateAllocator(
7951  const VmaAllocatorCreateInfo* pCreateInfo,
7952  VmaAllocator* pAllocator)
7953 {
7954  VMA_ASSERT(pCreateInfo && pAllocator);
7955  VMA_DEBUG_LOG("vmaCreateAllocator");
7956  *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
7957  return VK_SUCCESS;
7958 }
7959 
7960 void vmaDestroyAllocator(
7961  VmaAllocator allocator)
7962 {
7963  if(allocator != VK_NULL_HANDLE)
7964  {
7965  VMA_DEBUG_LOG("vmaDestroyAllocator");
7966  VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7967  vma_delete(&allocationCallbacks, allocator);
7968  }
7969 }
7970 
7971 void vmaGetPhysicalDeviceProperties(
7972  VmaAllocator allocator,
7973  const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7974 {
7975  VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7976  *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7977 }
7978 
7979 void vmaGetMemoryProperties(
7980  VmaAllocator allocator,
7981  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7982 {
7983  VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7984  *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7985 }
7986 
7987 void vmaGetMemoryTypeProperties(
7988  VmaAllocator allocator,
7989  uint32_t memoryTypeIndex,
7990  VkMemoryPropertyFlags* pFlags)
7991 {
7992  VMA_ASSERT(allocator && pFlags);
7993  VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7994  *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7995 }
7996 
7997 void vmaSetCurrentFrameIndex(
7998  VmaAllocator allocator,
7999  uint32_t frameIndex)
8000 {
8001  VMA_ASSERT(allocator);
8002  VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8003 
8004  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8005 
8006  allocator->SetCurrentFrameIndex(frameIndex);
8007 }
8008 
8009 void vmaCalculateStats(
8010  VmaAllocator allocator,
8011  VmaStats* pStats)
8012 {
8013  VMA_ASSERT(allocator && pStats);
8014  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8015  allocator->CalculateStats(pStats);
8016 }
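/*
Usage sketch, assuming a valid `allocator`:

    VmaStats stats;
    vmaCalculateStats(allocator, &stats);
    printf("Blocks: %u, used bytes: %llu, unused bytes: %llu\n",
        stats.total.blockCount,
        (unsigned long long)stats.total.usedBytes,
        (unsigned long long)stats.total.unusedBytes);
*/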
8017 
8018 #if VMA_STATS_STRING_ENABLED
8019 
8020 void vmaBuildStatsString(
8021  VmaAllocator allocator,
8022  char** ppStatsString,
8023  VkBool32 detailedMap)
8024 {
8025  VMA_ASSERT(allocator && ppStatsString);
8026  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8027 
8028  VmaStringBuilder sb(allocator);
8029  {
8030  VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8031  json.BeginObject();
8032 
8033  VmaStats stats;
8034  allocator->CalculateStats(&stats);
8035 
8036  json.WriteString("Total");
8037  VmaPrintStatInfo(json, stats.total);
8038 
8039  for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8040  {
8041  json.BeginString("Heap ");
8042  json.ContinueString(heapIndex);
8043  json.EndString();
8044  json.BeginObject();
8045 
8046  json.WriteString("Size");
8047  json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8048 
8049  json.WriteString("Flags");
8050  json.BeginArray(true);
8051  if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8052  {
8053  json.WriteString("DEVICE_LOCAL");
8054  }
8055  json.EndArray();
8056 
8057  if(stats.memoryHeap[heapIndex].blockCount > 0)
8058  {
8059  json.WriteString("Stats");
8060  VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
8061  }
8062 
8063  for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8064  {
8065  if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8066  {
8067  json.BeginString("Type ");
8068  json.ContinueString(typeIndex);
8069  json.EndString();
8070 
8071  json.BeginObject();
8072 
8073  json.WriteString("Flags");
8074  json.BeginArray(true);
8075  VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8076  if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8077  {
8078  json.WriteString("DEVICE_LOCAL");
8079  }
8080  if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8081  {
8082  json.WriteString("HOST_VISIBLE");
8083  }
8084  if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8085  {
8086  json.WriteString("HOST_COHERENT");
8087  }
8088  if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8089  {
8090  json.WriteString("HOST_CACHED");
8091  }
8092  if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8093  {
8094  json.WriteString("LAZILY_ALLOCATED");
8095  }
8096  json.EndArray();
8097 
8098  if(stats.memoryType[typeIndex].blockCount > 0)
8099  {
8100  json.WriteString("Stats");
8101  VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
8102  }
8103 
8104  json.EndObject();
8105  }
8106  }
8107 
8108  json.EndObject();
8109  }
8110  if(detailedMap == VK_TRUE)
8111  {
8112  allocator->PrintDetailedMap(json);
8113  }
8114 
8115  json.EndObject();
8116  }
8117 
8118  const size_t len = sb.GetLength();
8119  char* const pChars = vma_new_array(allocator, char, len + 1);
8120  if(len > 0)
8121  {
8122  memcpy(pChars, sb.GetData(), len);
8123  }
8124  pChars[len] = '\0';
8125  *ppStatsString = pChars;
8126 }
8127 
8128 void vmaFreeStatsString(
8129  VmaAllocator allocator,
8130  char* pStatsString)
8131 {
8132  if(pStatsString != VMA_NULL)
8133  {
8134  VMA_ASSERT(allocator);
8135  size_t len = strlen(pStatsString);
8136  vma_delete_array(allocator, pStatsString, len + 1);
8137  }
8138 }
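/*
Typical pairing of the two functions above, assuming a valid `allocator`:

    char* statsString = nullptr;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // include detailed map
    puts(statsString); // or write it out as a .json file
    vmaFreeStatsString(allocator, statsString);
*/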
8139 
8140 #endif // #if VMA_STATS_STRING_ENABLED
8141 
8142 /*
8143 This function is not protected by any mutex because it just reads immutable data.
8144 */
8145 VkResult vmaFindMemoryTypeIndex(
8146  VmaAllocator allocator,
8147  uint32_t memoryTypeBits,
8148  const VmaAllocationCreateInfo* pAllocationCreateInfo,
8149  uint32_t* pMemoryTypeIndex)
8150 {
8151  VMA_ASSERT(allocator != VK_NULL_HANDLE);
8152  VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8153  VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8154 
8155  if(pAllocationCreateInfo->memoryTypeBits != 0)
8156  {
8157  memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
8158  }
8159 
8160  uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
8161  uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
8162 
8163  // Convert usage to requiredFlags and preferredFlags.
8164  switch(pAllocationCreateInfo->usage)
8165  {
8166  case VMA_MEMORY_USAGE_UNKNOWN:
8167  break;
8168  case VMA_MEMORY_USAGE_GPU_ONLY:
8169  preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8170  break;
8171  case VMA_MEMORY_USAGE_CPU_ONLY:
8172  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8173  break;
8174  case VMA_MEMORY_USAGE_CPU_TO_GPU:
8175  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8176  preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8177  break;
8178  case VMA_MEMORY_USAGE_GPU_TO_CPU:
8179  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8180  preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8181  break;
8182  default:
8183  break;
8184  }
8185 
8186  *pMemoryTypeIndex = UINT32_MAX;
8187  uint32_t minCost = UINT32_MAX;
8188  for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8189  memTypeIndex < allocator->GetMemoryTypeCount();
8190  ++memTypeIndex, memTypeBit <<= 1)
8191  {
8192  // This memory type is acceptable according to memoryTypeBits bitmask.
8193  if((memTypeBit & memoryTypeBits) != 0)
8194  {
8195  const VkMemoryPropertyFlags currFlags =
8196  allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8197  // This memory type contains requiredFlags.
8198  if((requiredFlags & ~currFlags) == 0)
8199  {
8200  // Calculate cost as number of bits from preferredFlags not present in this memory type.
8201  uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8202  // Remember memory type with lowest cost.
8203  if(currCost < minCost)
8204  {
8205  *pMemoryTypeIndex = memTypeIndex;
8206  if(currCost == 0)
8207  {
8208  return VK_SUCCESS;
8209  }
8210  minCost = currCost;
8211  }
8212  }
8213  }
8214  }
8215  return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8216 }
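/*
Worked example of the cost metric above: with preferredFlags = DEVICE_LOCAL |
HOST_COHERENT and two candidate types passing the requiredFlags test, a type
offering only DEVICE_LOCAL costs 1 (one preferred bit missing) while a type
offering both costs 0 and is returned immediately, since a cost of 0 cannot be
improved upon.
*/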
8217 
8218 VkResult vmaCreatePool(
8219  VmaAllocator allocator,
8220  const VmaPoolCreateInfo* pCreateInfo,
8221  VmaPool* pPool)
8222 {
8223  VMA_ASSERT(allocator && pCreateInfo && pPool);
8224 
8225  VMA_DEBUG_LOG("vmaCreatePool");
8226 
8227  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8228 
8229  return allocator->CreatePool(pCreateInfo, pPool);
8230 }
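/*
Usage sketch, assuming a valid `allocator` and a `memTypeIndex` found earlier
with vmaFindMemoryTypeIndex():

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 0;     // 0 = the allocator's preferred block size
    poolCreateInfo.maxBlockCount = 0; // 0 = no limit, per CreatePool above

    VmaPool pool;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
    // ... use the pool via VmaAllocationCreateInfo::pool, then:
    // vmaDestroyPool(allocator, pool);
*/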
8231 
8232 void vmaDestroyPool(
8233  VmaAllocator allocator,
8234  VmaPool pool)
8235 {
8236  VMA_ASSERT(allocator);
8237 
8238  if(pool == VK_NULL_HANDLE)
8239  {
8240  return;
8241  }
8242 
8243  VMA_DEBUG_LOG("vmaDestroyPool");
8244 
8245  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8246 
8247  allocator->DestroyPool(pool);
8248 }
8249 
8250 void vmaGetPoolStats(
8251  VmaAllocator allocator,
8252  VmaPool pool,
8253  VmaPoolStats* pPoolStats)
8254 {
8255  VMA_ASSERT(allocator && pool && pPoolStats);
8256 
8257  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8258 
8259  allocator->GetPoolStats(pool, pPoolStats);
8260 }
8261 
8262 void vmaMakePoolAllocationsLost(
8263  VmaAllocator allocator,
8264  VmaPool pool,
8265  size_t* pLostAllocationCount)
8266 {
8267  VMA_ASSERT(allocator && pool);
8268 
8269  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8270 
8271  allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8272 }
8273 
8274 VkResult vmaAllocateMemory(
8275  VmaAllocator allocator,
8276  const VkMemoryRequirements* pVkMemoryRequirements,
8277  const VmaAllocationCreateInfo* pCreateInfo,
8278  VmaAllocation* pAllocation,
8279  VmaAllocationInfo* pAllocationInfo)
8280 {
8281  VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8282 
8283  VMA_DEBUG_LOG("vmaAllocateMemory");
8284 
8285  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8286 
8287  VkResult result = allocator->AllocateMemory(
8288  *pVkMemoryRequirements,
8289  false, // requiresDedicatedAllocation
8290  false, // prefersDedicatedAllocation
8291  VK_NULL_HANDLE, // dedicatedBuffer
8292  VK_NULL_HANDLE, // dedicatedImage
8293  *pCreateInfo,
8294  VMA_SUBALLOCATION_TYPE_UNKNOWN,
8295  pAllocation);
8296 
8297  if(pAllocationInfo && result == VK_SUCCESS)
8298  {
8299  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8300  }
8301 
8302  return result;
8303 }
8304 
8305 VkResult vmaAllocateMemoryForBuffer(
8306  VmaAllocator allocator,
8307  VkBuffer buffer,
8308  const VmaAllocationCreateInfo* pCreateInfo,
8309  VmaAllocation* pAllocation,
8310  VmaAllocationInfo* pAllocationInfo)
8311 {
8312  VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8313 
8314  VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
8315 
8316  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8317 
8318  VkMemoryRequirements vkMemReq = {};
8319  bool requiresDedicatedAllocation = false;
8320  bool prefersDedicatedAllocation = false;
8321  allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8322  requiresDedicatedAllocation,
8323  prefersDedicatedAllocation);
8324 
8325  VkResult result = allocator->AllocateMemory(
8326  vkMemReq,
8327  requiresDedicatedAllocation,
8328  prefersDedicatedAllocation,
8329  buffer, // dedicatedBuffer
8330  VK_NULL_HANDLE, // dedicatedImage
8331  *pCreateInfo,
8332  VMA_SUBALLOCATION_TYPE_BUFFER,
8333  pAllocation);
8334 
8335  if(pAllocationInfo && result == VK_SUCCESS)
8336  {
8337  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8338  }
8339 
8340  return result;
8341 }
8342 
8343 VkResult vmaAllocateMemoryForImage(
8344  VmaAllocator allocator,
8345  VkImage image,
8346  const VmaAllocationCreateInfo* pCreateInfo,
8347  VmaAllocation* pAllocation,
8348  VmaAllocationInfo* pAllocationInfo)
8349 {
8350  VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8351 
8352  VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
8353 
8354  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8355 
8356  VkResult result = AllocateMemoryForImage(
8357  allocator,
8358  image,
8359  pCreateInfo,
8360  VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8361  pAllocation);
8362 
8363  if(pAllocationInfo && result == VK_SUCCESS)
8364  {
8365  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8366  }
8367 
8368  return result;
8369 }
8370 
8371 void vmaFreeMemory(
8372  VmaAllocator allocator,
8373  VmaAllocation allocation)
8374 {
8375  VMA_ASSERT(allocator && allocation);
8376 
8377  VMA_DEBUG_LOG("vmaFreeMemory");
8378 
8379  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8380 
8381  allocator->FreeMemory(allocation);
8382 }
8383 
8384 void vmaGetAllocationInfo(
8385  VmaAllocator allocator,
8386  VmaAllocation allocation,
8387  VmaAllocationInfo* pAllocationInfo)
8388 {
8389  VMA_ASSERT(allocator && allocation && pAllocationInfo);
8390 
8391  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8392 
8393  allocator->GetAllocationInfo(allocation, pAllocationInfo);
8394 }
8395 
8396 void vmaSetAllocationUserData(
8397  VmaAllocator allocator,
8398  VmaAllocation allocation,
8399  void* pUserData)
8400 {
8401  VMA_ASSERT(allocator && allocation);
8402 
8403  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8404 
8405  allocation->SetUserData(allocator, pUserData);
8406 }
8407 
8408 void vmaCreateLostAllocation(
8409  VmaAllocator allocator,
8410  VmaAllocation* pAllocation)
8411 {
8412  VMA_ASSERT(allocator && pAllocation);
8413 
8414  VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8415 
8416  allocator->CreateLostAllocation(pAllocation);
8417 }
8418 
8419 VkResult vmaMapMemory(
8420  VmaAllocator allocator,
8421  VmaAllocation allocation,
8422  void** ppData)
8423 {
8424  VMA_ASSERT(allocator && allocation && ppData);
8425 
8426  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8427 
8428  return allocator->Map(allocation, ppData);
8429 }
8430 
8431 void vmaUnmapMemory(
8432  VmaAllocator allocator,
8433  VmaAllocation allocation)
8434 {
8435  VMA_ASSERT(allocator && allocation);
8436 
8437  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8438 
8439  allocator->Unmap(allocation);
8440 }
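/*
Map/unmap pairing, assuming `allocation` lives in HOST_VISIBLE memory and was
created without VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT (mapping lost-capable
allocations fails above with VK_ERROR_MEMORY_MAP_FAILED); `srcData` and
`srcDataSize` are assumed:

    void* mappedData;
    if(vmaMapMemory(allocator, allocation, &mappedData) == VK_SUCCESS)
    {
        memcpy(mappedData, srcData, srcDataSize);
        vmaUnmapMemory(allocator, allocation);
    }
*/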
8441 
8442 VkResult vmaDefragment(
8443  VmaAllocator allocator,
8444  VmaAllocation* pAllocations,
8445  size_t allocationCount,
8446  VkBool32* pAllocationsChanged,
8447  const VmaDefragmentationInfo *pDefragmentationInfo,
8448  VmaDefragmentationStats* pDefragmentationStats)
8449 {
8450  VMA_ASSERT(allocator && pAllocations);
8451 
8452  VMA_DEBUG_LOG("vmaDefragment");
8453 
8454  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8455 
8456  return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8457 }
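/*
Usage sketch, assuming `allocations` is a std::vector<VmaAllocation> of
block-based, HOST_VISIBLE allocations that are safe to move right now:

    std::vector<VkBool32> changed(allocations.size());
    VmaDefragmentationInfo defragInfo = {};
    defragInfo.maxBytesToMove = VK_WHOLE_SIZE;    // no byte limit
    defragInfo.maxAllocationsToMove = UINT32_MAX; // no count limit
    VmaDefragmentationStats defragStats = {};
    vmaDefragment(allocator, allocations.data(), allocations.size(),
        changed.data(), &defragInfo, &defragStats);
    // Where changed[i] == VK_TRUE the allocation was moved: destroy and
    // recreate (or at least rebind) any buffer/image bound to it.
*/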
8458 
8459 VkResult vmaCreateBuffer(
8460  VmaAllocator allocator,
8461  const VkBufferCreateInfo* pBufferCreateInfo,
8462  const VmaAllocationCreateInfo* pAllocationCreateInfo,
8463  VkBuffer* pBuffer,
8464  VmaAllocation* pAllocation,
8465  VmaAllocationInfo* pAllocationInfo)
8466 {
8467  VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8468 
8469  VMA_DEBUG_LOG("vmaCreateBuffer");
8470 
8471  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8472 
8473  *pBuffer = VK_NULL_HANDLE;
8474  *pAllocation = VK_NULL_HANDLE;
8475 
8476  // 1. Create VkBuffer.
8477  VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8478  allocator->m_hDevice,
8479  pBufferCreateInfo,
8480  allocator->GetAllocationCallbacks(),
8481  pBuffer);
8482  if(res >= 0)
8483  {
8484  // 2. vkGetBufferMemoryRequirements.
8485  VkMemoryRequirements vkMemReq = {};
8486  bool requiresDedicatedAllocation = false;
8487  bool prefersDedicatedAllocation = false;
8488  allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8489  requiresDedicatedAllocation, prefersDedicatedAllocation);
8490 
8491  // Make sure alignment requirements for specific buffer usages reported
8492  // in Physical Device Properties are included in alignment reported by memory requirements.
8493  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8494  {
8495  VMA_ASSERT(vkMemReq.alignment %
8496  allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8497  }
8498  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8499  {
8500  VMA_ASSERT(vkMemReq.alignment %
8501  allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8502  }
8503  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8504  {
8505  VMA_ASSERT(vkMemReq.alignment %
8506  allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8507  }
8508 
8509  // 3. Allocate memory using allocator.
8510  res = allocator->AllocateMemory(
8511  vkMemReq,
8512  requiresDedicatedAllocation,
8513  prefersDedicatedAllocation,
8514  *pBuffer, // dedicatedBuffer
8515  VK_NULL_HANDLE, // dedicatedImage
8516  *pAllocationCreateInfo,
8517  VMA_SUBALLOCATION_TYPE_BUFFER,
8518  pAllocation);
8519  if(res >= 0)
8520  {
8521  // 4. Bind buffer with memory.
8522  res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8523  allocator->m_hDevice,
8524  *pBuffer,
8525  (*pAllocation)->GetMemory(),
8526  (*pAllocation)->GetOffset());
8527  if(res >= 0)
8528  {
8529  // All steps succeeded.
8530  if(pAllocationInfo != VMA_NULL)
8531  {
8532  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8533  }
8534  return VK_SUCCESS;
8535  }
8536  allocator->FreeMemory(*pAllocation);
8537  *pAllocation = VK_NULL_HANDLE;
8538  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8539  *pBuffer = VK_NULL_HANDLE;
8540  return res;
8541  }
8542  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8543  *pBuffer = VK_NULL_HANDLE;
8544  return res;
8545  }
8546  return res;
8547 }
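/*
Usage sketch of the create-allocate-bind sequence above, assuming a valid
`allocator`:

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer;
    VmaAllocation allocation;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
        &buffer, &allocation, nullptr);
    // ... later:
    // vmaDestroyBuffer(allocator, buffer, allocation);
*/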
8548 
8549 void vmaDestroyBuffer(
8550  VmaAllocator allocator,
8551  VkBuffer buffer,
8552  VmaAllocation allocation)
8553 {
8554  if(buffer != VK_NULL_HANDLE)
8555  {
8556  VMA_ASSERT(allocator);
8557 
8558  VMA_DEBUG_LOG("vmaDestroyBuffer");
8559 
8560  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8561 
8562  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8563 
8564  allocator->FreeMemory(allocation);
8565  }
8566 }
8567 
8568 VkResult vmaCreateImage(
8569  VmaAllocator allocator,
8570  const VkImageCreateInfo* pImageCreateInfo,
8571  const VmaAllocationCreateInfo* pAllocationCreateInfo,
8572  VkImage* pImage,
8573  VmaAllocation* pAllocation,
8574  VmaAllocationInfo* pAllocationInfo)
8575 {
8576  VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8577 
8578  VMA_DEBUG_LOG("vmaCreateImage");
8579 
8580  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8581 
8582  *pImage = VK_NULL_HANDLE;
8583  *pAllocation = VK_NULL_HANDLE;
8584 
8585  // 1. Create VkImage.
8586  VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8587  allocator->m_hDevice,
8588  pImageCreateInfo,
8589  allocator->GetAllocationCallbacks(),
8590  pImage);
8591  if(res >= 0)
8592  {
8593  VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8594  VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8595  VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8596 
8597  // 2. Allocate memory using allocator.
8598  res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8599  if(res >= 0)
8600  {
8601  // 3. Bind image with memory.
8602  res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8603  allocator->m_hDevice,
8604  *pImage,
8605  (*pAllocation)->GetMemory(),
8606  (*pAllocation)->GetOffset());
8607  if(res >= 0)
8608  {
8609  // All steps succeeded.
8610  if(pAllocationInfo != VMA_NULL)
8611  {
8612  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8613  }
8614  return VK_SUCCESS;
8615  }
8616  allocator->FreeMemory(*pAllocation);
8617  *pAllocation = VK_NULL_HANDLE;
8618  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8619  *pImage = VK_NULL_HANDLE;
8620  return res;
8621  }
8622  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8623  *pImage = VK_NULL_HANDLE;
8624  return res;
8625  }
8626  return res;
8627 }
8628 
8629 void vmaDestroyImage(
8630  VmaAllocator allocator,
8631  VkImage image,
8632  VmaAllocation allocation)
8633 {
8634  if(image != VK_NULL_HANDLE)
8635  {
8636  VMA_ASSERT(allocator);
8637 
8638  VMA_DEBUG_LOG("vmaDestroyImage");
8639 
8640  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8641 
8642  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8643 
8644  allocator->FreeMemory(allocation);
8645  }
8646 }
8647 
8648 #endif // #ifdef VMA_IMPLEMENTATION
PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:793
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1047
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:818
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:803
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1004
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:797
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1315
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:815
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1481
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1185
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1239
Definition: vk_mem_alloc.h:1084
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:786
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1122
Definition: vk_mem_alloc.h:1031
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:827
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:880
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:812
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1035
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:945
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:800
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:944
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:808
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1485
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:844
VmaStatInfo total
Definition: vk_mem_alloc.h:954
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1493
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1106
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1476
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:801
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:728
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:821
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1193
Definition: vk_mem_alloc.h:1187
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1325
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:798
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1143
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1209
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1245
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:784
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1196
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
VmaMemoryUsage
Definition: vk_mem_alloc.h:982
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1471
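A hedged sketch of a defragmentation call using these parameters. It assumes allocations, allocationCount, and allocationsChanged are arrays you maintain, and that vmaDefragment() has the array-based signature declared in this version of the header:

    VmaDefragmentationInfo defragInfo = {0};
    defragInfo.maxBytesToMove = VK_WHOLE_SIZE;    // no limit on bytes moved
    defragInfo.maxAllocationsToMove = UINT32_MAX; // no limit on allocations moved

    VmaDefragmentationStats defragStats = {0};
    VkResult res = vmaDefragment(allocator, allocations, allocationCount,
        allocationsChanged, &defragInfo, &defragStats);
    // Inspect defragStats.bytesMoved, .bytesFreed, .allocationsMoved,
    // .deviceMemoryBlocksFreed afterwards.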
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1489
Definition: vk_mem_alloc.h:1021
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1130
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:799
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:950
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:734
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
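A small usage sketch; myMetadata stands for any pointer of your own that you want to attach to the allocation:

    vmaSetAllocationUserData(allocator, allocation, (void*)myMetadata);

    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(allocator, allocation, &allocInfo);
    // allocInfo.pUserData now equals myMetadata.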
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
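A minimal sketch of creating and destroying a custom pool; the block size and block counts are arbitrary example values, and memTypeIndex is assumed to come from vmaFindMemoryTypeIndex():

    VmaPoolCreateInfo poolCreateInfo = {0};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 64ull * 1024 * 1024; // 64 MiB per VkDeviceMemory block
    poolCreateInfo.minBlockCount = 1;               // keep at least one block alive
    poolCreateInfo.maxBlockCount = 4;               // never grow past four blocks

    VmaPool pool;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
    // ... allocate with VmaAllocationCreateInfo::pool = pool ...
    vmaDestroyPool(allocator, pool);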
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:755
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:760
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1491
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates a new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1117
VkDeviceSize unusedRangeSizeMax
Size of the largest contiguous free memory region.
Definition: vk_mem_alloc.h:1255
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as a string in JSON format.
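A usage sketch; the returned string must be released with vmaFreeStatsString(), which is declared alongside this function in the header:

    char* statsString = NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = detailed map
    // ... write statsString to a file or log for offline inspection ...
    vmaFreeStatsString(allocator, statsString);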
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:794
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:933
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1204
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:747
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1091
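A minimal sketch of creating a GPU-only vertex buffer with this function; the size and usage flags are example values, and VMA_MEMORY_USAGE_GPU_ONLY is a value of this library's VmaMemoryUsage enum:

    VkBufferCreateInfo bufCreateInfo = {0};
    bufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
        VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {0};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer;
    VmaAllocation allocation;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
        &buffer, &allocation, NULL); // pAllocationInfo may be NULL
    // ... when done: vmaDestroyBuffer(allocator, buffer, allocation);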
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:946
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:751
VmaPoolCreateFlags flags
Use a combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1199
Definition: vk_mem_alloc.h:1030
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1112
Definition: vk_mem_alloc.h:1103
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:936
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:796
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1217
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:830
size_t unusedRangeCount
Number of contiguous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1248
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1101
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1136
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on the maximum number of bytes that can be allocated out of a particular Vulkan memory heap.
Definition: vk_mem_alloc.h:868
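A sketch of capping one heap, assuming allocatorInfo is the VmaAllocatorCreateInfo being filled and heap index 0 is the one you want to limit (the right index depends on your device's memory properties):

    VkDeviceSize heapLimits[VK_MAX_MEMORY_HEAPS];
    uint32_t i;
    for (i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        heapLimits[i] = VK_WHOLE_SIZE;        // VK_WHOLE_SIZE = no limit on that heap
    heapLimits[0] = 256ull * 1024 * 1024;     // cap heap 0 at 256 MiB (index assumed)

    allocatorInfo.pHeapSizeLimit = heapLimits; // set before vmaCreateAllocator()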
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:952
Set this flag to use memory that will be persistently mapped, and to retrieve a pointer to it...
Definition: vk_mem_alloc.h:1071
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:945
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:805
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:749
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:804
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
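A short sketch of a map/write/unmap cycle; srcData and dataSize are assumed to be your own host data, and the allocation is assumed to live in host-visible memory:

    void* pData;
    VkResult res = vmaMapMemory(allocator, allocation, &pData);
    if (res == VK_SUCCESS)
    {
        memcpy(pData, srcData, (size_t)dataSize); // requires <string.h>
        vmaUnmapMemory(allocator, allocation);
    }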
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as the current frame.
Definition: vk_mem_alloc.h:1231
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vmaSetAllocationUserData().
Definition: vk_mem_alloc.h:1339
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:824
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:945
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:942
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1236
VkDeviceSize offset
Offset into the deviceMemory object to the beginning of this allocation, in bytes. The (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1320
Definition: vk_mem_alloc.h:1099
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1487
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:792
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
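A minimal sketch of allocator setup and teardown, assuming physicalDevice and device are the Vulkan handles you created earlier:

    VmaAllocatorCreateInfo allocatorInfo = {0};
    allocatorInfo.physicalDevice = physicalDevice; // assumed: your VkPhysicalDevice
    allocatorInfo.device = device;                 // assumed: your VkDevice

    VmaAllocator allocator;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
    // ... create buffers, images, pools through the allocator ...
    vmaDestroyAllocator(allocator);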
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:807
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:940
Definition: vk_mem_alloc.h:987
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1189
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:938
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:802
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:806
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such blocks.
Definition: vk_mem_alloc.h:1058
Definition: vk_mem_alloc.h:1014
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1334
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:782
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:795
Parameters of VmaAllocation objects that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1301
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
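A hedged sketch pairing this function with vmaFreeMemory(); the memory requirements are assumed to come from an existing buffer, and VMA_MEMORY_USAGE_GPU_ONLY is a value of this library's VmaMemoryUsage enum:

    VkMemoryRequirements memReq;
    vkGetBufferMemoryRequirements(device, buffer, &memReq); // assumed: device, buffer

    VmaAllocationCreateInfo allocCreateInfo = {0};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocation allocation;
    VmaAllocationInfo allocInfo;
    VkResult res = vmaAllocateMemory(allocator, &memReq, &allocCreateInfo,
        &allocation, &allocInfo);
    // ... bind allocInfo.deviceMemory at allocInfo.offset, use it, then:
    vmaFreeMemory(allocator, allocation);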
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
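A minimal sketch of a render loop driving the frame index, which the allocator's lost-allocation logic compares against frameInUseCount:

    uint32_t frameIndex = 0;
    for (;;) // render loop, assumed
    {
        ++frameIndex;
        vmaSetCurrentFrameIndex(allocator, frameIndex);
        // ... record and submit this frame's work ...
    }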
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1167
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:946
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:953
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1242
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:946
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1306