Vulkan Memory Allocator
vk_mem_alloc.h
//
// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//

#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#ifdef __cplusplus
extern "C" {
#endif

#include <vulkan/vulkan.h>

VK_DEFINE_HANDLE(VmaAllocator)

/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);

/// Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
typedef struct VmaDeviceMemoryCallbacks {
    /// Optional, can be null.
    PFN_vmaAllocateDeviceMemoryFunction pfnAllocate;
    /// Optional, can be null.
    PFN_vmaFreeDeviceMemoryFunction pfnFree;
} VmaDeviceMemoryCallbacks;

/// Flags for created VmaAllocator.
typedef enum VmaAllocatorCreateFlagBits {
    /// Allocator and all objects created from it will not be synchronized internally,
    /// so you must guarantee they are used from only one thread at a time or
    /// synchronized externally by you.
    VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001,
    /// Enables usage of the VK_KHR_dedicated_allocation extension.
    VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT = 0x00000002,

    VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VmaAllocatorCreateFlagBits;
typedef VkFlags VmaAllocatorCreateFlags;

/// Pointers to some Vulkan functions - a subset used by the library.
typedef struct VmaVulkanFunctions {
    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    PFN_vkAllocateMemory vkAllocateMemory;
    PFN_vkFreeMemory vkFreeMemory;
    PFN_vkMapMemory vkMapMemory;
    PFN_vkUnmapMemory vkUnmapMemory;
    PFN_vkBindBufferMemory vkBindBufferMemory;
    PFN_vkBindImageMemory vkBindImageMemory;
    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    PFN_vkCreateBuffer vkCreateBuffer;
    PFN_vkDestroyBuffer vkDestroyBuffer;
    PFN_vkCreateImage vkCreateImage;
    PFN_vkDestroyImage vkDestroyImage;
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
} VmaVulkanFunctions;

/// Description of an Allocator to be created.
typedef struct VmaAllocatorCreateInfo
{
    /// Flags for the created allocator. Use VmaAllocatorCreateFlagBits enum.
    VmaAllocatorCreateFlags flags;

    /// Vulkan physical device. It must be valid throughout the whole lifetime of the created allocator.
    VkPhysicalDevice physicalDevice;

    /// Vulkan device. It must be valid throughout the whole lifetime of the created allocator.
    VkDevice device;

    /// Preferred size of a single VkDeviceMemory block to be allocated from large heaps. Optional, can be 0 to use the default.
    VkDeviceSize preferredLargeHeapBlockSize;

    /// Preferred size of a single VkDeviceMemory block to be allocated from small heaps. Optional, can be 0 to use the default.
    VkDeviceSize preferredSmallHeapBlockSize;

    /// Custom CPU memory allocation callbacks. Optional, can be null.
    const VkAllocationCallbacks* pAllocationCallbacks;

    /// Informative callbacks for vkAllocateMemory and vkFreeMemory. Optional, can be null.
    const VmaDeviceMemoryCallbacks* pDeviceMemoryCallbacks;

    /// Maximum number of additional frames that are in use at the same time as the current frame.
    uint32_t frameInUseCount;

    /// Either null or a pointer to an array of limits on the maximum number of bytes that can be allocated out of particular Vulkan memory heaps.
    const VkDeviceSize* pHeapSizeLimit;

    /// Pointers to Vulkan functions. Can be null if VMA_STATIC_VULKAN_FUNCTIONS is left defined to 1.
    const VmaVulkanFunctions* pVulkanFunctions;
} VmaAllocatorCreateInfo;

/// Creates Allocator object.
VkResult vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator);

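/*
Example (illustrative sketch, not part of the original file): minimal allocator
setup. `physicalDevice` and `device` are assumed to be valid handles already
created by the application.

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;

    VmaAllocator allocator;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
    // Check res, use the allocator, and finally call vmaDestroyAllocator(allocator).
*/
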
/// Destroys allocator object.
void vmaDestroyAllocator(
    VmaAllocator allocator);

/// PhysicalDeviceProperties are fetched from physicalDevice by the allocator. You can access them here, without fetching them again on your own.
void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

/// PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator. You can access them here, without fetching them again on your own.
void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

/// Given a memory type index, returns the property flags of this memory type.
void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

/// Sets the index of the current frame.
void vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);

/// Calculated statistics of memory usage in entire allocator.
typedef struct VmaStatInfo
{
    /// Number of VkDeviceMemory Vulkan memory blocks allocated.
    uint32_t blockCount;
    /// Number of VmaAllocation allocation objects allocated.
    uint32_t allocationCount;
    /// Number of free ranges of memory between allocations.
    uint32_t unusedRangeCount;
    /// Total number of bytes occupied by all allocations.
    VkDeviceSize usedBytes;
    /// Total number of bytes occupied by unused ranges.
    VkDeviceSize unusedBytes;
    VkDeviceSize allocationSizeMin, allocationSizeAvg, allocationSizeMax;
    VkDeviceSize unusedRangeSizeMin, unusedRangeSizeAvg, unusedRangeSizeMax;
} VmaStatInfo;

/// General statistics from the current state of the Allocator.
typedef struct VmaStats
{
    VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES];
    VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS];
    VmaStatInfo total;
} VmaStats;

924 
926 void vmaCalculateStats(
927  VmaAllocator allocator,
928  VmaStats* pStats);
929 
930 #define VMA_STATS_STRING_ENABLED 1
931 
932 #if VMA_STATS_STRING_ENABLED
933 
935 
938  VmaAllocator allocator,
939  char** ppStatsString,
940  VkBool32 detailedMap);
941 
942 void vmaFreeStatsString(
943  VmaAllocator allocator,
944  char* pStatsString);
945 
946 #endif // #if VMA_STATS_STRING_ENABLED
947 
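/*
Example (illustrative sketch): dumping allocator statistics as a JSON string.
Assumes `allocator` is a valid VmaAllocator.

    char* statsString = NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE);
    printf("%s\n", statsString);
    vmaFreeStatsString(allocator, statsString);
*/
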
VK_DEFINE_HANDLE(VmaPool)

typedef enum VmaMemoryUsage
{
    /// No intended memory usage specified.
    VMA_MEMORY_USAGE_UNKNOWN = 0,
    /// Memory will be used on device only, so faster access from the device is preferred.
    VMA_MEMORY_USAGE_GPU_ONLY = 1,
    /// Memory will be mappable on host.
    VMA_MEMORY_USAGE_CPU_ONLY = 2,
    /// Memory that is both mappable on host and preferably fast to access by GPU.
    VMA_MEMORY_USAGE_CPU_TO_GPU = 3,
    /// Memory mappable on host and cached, preferred for reading data back from GPU.
    VMA_MEMORY_USAGE_GPU_TO_CPU = 4,
    VMA_MEMORY_USAGE_MAX_ENUM = 0x7FFFFFFF
} VmaMemoryUsage;

/// Flags to be passed as VmaAllocationCreateInfo::flags.
typedef enum VmaAllocationCreateFlagBits {
    VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT = 0x00000001,
    VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT = 0x00000002,
    VMA_ALLOCATION_CREATE_MAPPED_BIT = 0x00000004,
    VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT = 0x00000008,
    VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT = 0x00000010,
    VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT = 0x00000020,

    VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VmaAllocationCreateFlagBits;
typedef VkFlags VmaAllocationCreateFlags;

typedef struct VmaAllocationCreateInfo
{
    /// Use VmaAllocationCreateFlagBits enum.
    VmaAllocationCreateFlags flags;
    /// Intended usage of the memory. You can leave VMA_MEMORY_USAGE_UNKNOWN if you specify memory requirements another way.
    VmaMemoryUsage usage;
    /// Flags that must be set in a memory type chosen for an allocation.
    VkMemoryPropertyFlags requiredFlags;
    /// Flags that preferably should be set in a memory type chosen for an allocation.
    VkMemoryPropertyFlags preferredFlags;
    /// Bitmask containing one bit set for every memory type acceptable for this allocation.
    uint32_t memoryTypeBits;
    /// Pool that this allocation should be created in. Optional, can be null.
    VmaPool pool;
    /// Custom general-purpose pointer that will be stored in the VmaAllocation.
    void* pUserData;
} VmaAllocationCreateInfo;

VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

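/*
Example (illustrative sketch): finding the memory type index that would be used
for a staging buffer. `allocator` is assumed to be a valid VmaAllocator and
`memReq` a VkMemoryRequirements structure queried for the resource.

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    uint32_t memTypeIndex;
    VkResult res = vmaFindMemoryTypeIndex(
        allocator, memReq.memoryTypeBits, &allocCreateInfo, &memTypeIndex);
*/
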
/// Flags to be passed as VmaPoolCreateInfo::flags.
typedef enum VmaPoolCreateFlagBits {
    /// Use this flag if you always allocate only buffers and linear images, or only optimal images, out of this pool, so Buffer-Image Granularity can be ignored.
    VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT = 0x00000002,

    VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VmaPoolCreateFlagBits;
typedef VkFlags VmaPoolCreateFlags;

/// Describes parameters of a VmaPool to be created.
typedef struct VmaPoolCreateInfo {
    /// Vulkan memory type index to allocate this pool from.
    uint32_t memoryTypeIndex;
    /// Use combination of VmaPoolCreateFlagBits.
    VmaPoolCreateFlags flags;
    /// Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
    VkDeviceSize blockSize;
    /// Minimum number of blocks to be always allocated in this pool, even if they stay empty.
    size_t minBlockCount;
    /// Maximum number of blocks that can be allocated in this pool. Optional, can be 0 for no limit.
    size_t maxBlockCount;
    /// Maximum number of additional frames that are in use at the same time as the current frame.
    uint32_t frameInUseCount;
} VmaPoolCreateInfo;

/// Describes parameters of an existing VmaPool.
typedef struct VmaPoolStats {
    /// Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
    VkDeviceSize size;
    /// Total number of bytes in the pool not used by any VmaAllocation.
    VkDeviceSize unusedSize;
    /// Number of VmaAllocation objects created from this pool that were not destroyed or lost.
    size_t allocationCount;
    /// Number of continuous memory ranges in the pool not used by any VmaAllocation.
    size_t unusedRangeCount;
    /// Size of the largest continuous free memory region.
    VkDeviceSize unusedRangeSizeMax;
} VmaPoolStats;

/// Allocates Vulkan device memory and creates a VmaPool object.
VkResult vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool);

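/*
Example (illustrative sketch): creating a custom pool. `memTypeIndex` would
typically come from vmaFindMemoryTypeIndex().

    VmaPoolCreateInfo poolInfo = {};
    poolInfo.memoryTypeIndex = memTypeIndex;
    poolInfo.blockSize = 64ull * 1024 * 1024; // 64 MiB per VkDeviceMemory block.

    VmaPool pool;
    VkResult res = vmaCreatePool(allocator, &poolInfo, &pool);
    // Allocations then opt in via VmaAllocationCreateInfo::pool = pool.
    // Destroy with vmaDestroyPool(allocator, pool) when done.
*/
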
/// Destroys a VmaPool object and frees Vulkan device memory.
void vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool);

/// Retrieves statistics of an existing VmaPool object.
void vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats);

/// Marks all allocations in the given pool as lost if they are not used in the current frame or VmaPoolCreateInfo::frameInUseCount frames back from now.
void vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

VK_DEFINE_HANDLE(VmaAllocation)

/// Parameters of a VmaAllocation object that can be retrieved using vmaGetAllocationInfo().
typedef struct VmaAllocationInfo {
    /// Memory type index that this allocation was allocated from.
    uint32_t memoryType;
    /// Handle to the Vulkan memory object.
    VkDeviceMemory deviceMemory;
    /// Offset into deviceMemory object to the beginning of this allocation, in bytes.
    VkDeviceSize offset;
    /// Size of this allocation, in bytes.
    VkDeviceSize size;
    /// Pointer to the beginning of this allocation as mapped data. Null if this allocation is not persistently mapped.
    void* pMappedData;
    /// Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vmaSetAllocationUserData().
    void* pUserData;
} VmaAllocationInfo;

/// General purpose memory allocation.
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Allocates memory suitable for the given VkBuffer.
VkResult vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Allocates memory suitable for the given VkImage.
VkResult vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
void vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation);

/// Returns current information about the specified allocation.
void vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo);

/// Sets pUserData in the given allocation to a new value.
void vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData);

/// Creates a new allocation that is in the lost state from the beginning.
void vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation);

/// Maps memory represented by the given allocation and returns a pointer to it.
VkResult vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData);

/// Unmaps memory represented by the given allocation, mapped previously using vmaMapMemory().
void vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation);

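/*
Example (illustrative sketch): uploading data through a temporarily mapped
pointer. Assumes `allocation` lives in a HOST_VISIBLE memory type and that
`srcData` and `dataSize` are provided by the application.

    void* mappedData;
    if(vmaMapMemory(allocator, allocation, &mappedData) == VK_SUCCESS)
    {
        memcpy(mappedData, srcData, (size_t)dataSize);
        vmaUnmapMemory(allocator, allocation);
    }
*/
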
/// Optional configuration parameters to be passed to vmaDefragment().
typedef struct VmaDefragmentationInfo {
    /// Maximum total number of bytes that can be copied while moving allocations to different places. VK_WHOLE_SIZE means no limit.
    VkDeviceSize maxBytesToMove;
    /// Maximum number of allocations that can be moved to a different place. UINT32_MAX means no limit.
    uint32_t maxAllocationsToMove;
} VmaDefragmentationInfo;

/// Statistics returned by vmaDefragment().
typedef struct VmaDefragmentationStats {
    /// Total number of bytes that have been copied while moving allocations to different places.
    VkDeviceSize bytesMoved;
    /// Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
    VkDeviceSize bytesFreed;
    /// Number of allocations that have been moved to different places.
    uint32_t allocationsMoved;
    /// Number of empty VkDeviceMemory objects that have been released to the system.
    uint32_t deviceMemoryBlocksFreed;
} VmaDefragmentationStats;

/// Compacts memory by moving allocations.
VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo *pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

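/*
Example (illustrative sketch): compacting a set of allocations. `allocations`
and `ALLOC_COUNT` are application-provided; buffers or images bound to
allocations reported in `allocationsChanged` must be destroyed and recreated
at their new locations afterwards.

    VkBool32 allocationsChanged[ALLOC_COUNT] = {};
    VmaDefragmentationStats stats = {};
    VkResult res = vmaDefragment(
        allocator,
        allocations, ALLOC_COUNT,
        allocationsChanged,
        nullptr, // Default VmaDefragmentationInfo.
        &stats);
*/
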
/// Creates a buffer, then allocates and binds memory for it.
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

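/*
Example (illustrative sketch): the typical way to create a buffer together with
its memory in a single call.

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 65536;
    bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer;
    VmaAllocation allocation;
    VkResult res = vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
    // ... use the buffer ...
    // vmaDestroyBuffer(allocator, buffer, allocation);
*/
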
/// Destroys a Vulkan buffer and frees its allocated memory.
void vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation);

/// Function similar to vmaCreateBuffer().
VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Destroys a Vulkan image and frees its allocated memory.
void vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation);

#ifdef __cplusplus
}
#endif

#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#ifdef __INTELLISENSE__
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdio> // for snprintf, used by the stats-string helpers below
#include <cstdlib>
#include <cstring>

/*******************************************************************************
CONFIGURATION SECTION

Define some of these macros before each #include of this header, or change them
here, if you need behavior other than the default, depending on your environment.
*/

/*
Define this macro to 1 to make the library fetch pointers to Vulkan functions
internally, like:

    vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;

Define it to 0 if you are going to provide your own pointers to Vulkan functions
via VmaAllocatorCreateInfo::pVulkanFunctions.
*/
#ifndef VMA_STATIC_VULKAN_FUNCTIONS
#define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

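/*
Example (illustrative sketch): with VMA_STATIC_VULKAN_FUNCTIONS set to 0, the
pointers are supplied explicitly, e.g. ones retrieved via
vkGetInstanceProcAddr/vkGetDeviceProcAddr or a loader library. Only the first
few members are shown; every member of VmaVulkanFunctions must be set.

    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    // ... fill the remaining members the same way ...

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.pVulkanFunctions = &vulkanFunctions;
*/
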
// Define this macro to 1 to make the library use STL containers instead of its own implementation.
//#define VMA_USE_STL_CONTAINERS 1

/* Set this macro to 1 to make the library include and use STL containers:
std::pair, std::vector, std::list, std::unordered_map.

Set it to 0 or leave it undefined to make the library use its own implementation
of the containers.
*/
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

/*
The following headers are used in this CONFIGURATION section only, so feel free
to remove them if not needed.
*/
#include <cassert> // for assert
#include <algorithm> // for min, max
#include <mutex> // for std::mutex
#include <atomic> // for std::atomic

#if !defined(_WIN32)
    #include <malloc.h> // for aligned_alloc()
#endif

// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef _DEBUG
        #define VMA_ASSERT(expr) assert(expr)
    #else
        #define VMA_ASSERT(expr)
    #endif
#endif

// Assert that will be called very often, e.g. inside data structures such as operator[].
// Making it non-empty can make the program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef _DEBUG
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)
    #endif
#endif

#ifndef VMA_NULL
    // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    #define VMA_NULL nullptr
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
    /*
    #define VMA_DEBUG_LOG(format, ...) do { \
        printf(format, __VA_ARGS__); \
        printf("\n"); \
    } while(false)
    */
#endif

// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif

#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        VmaMutex() { }
        ~VmaMutex() { }
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

/*
If providing your own implementation, you need to implement a subset of std::atomic:

- Constructor(uint32_t desired)
- uint32_t load() const
- void store(uint32_t desired)
- bool compare_exchange_weak(uint32_t& expected, uint32_t desired)
*/
#ifndef VMA_ATOMIC_UINT32
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_BEST_FIT
    // Define to 1 to use the best-fit strategy when searching for a free suballocation, or 0 to use the first suitable one found.
    #define VMA_BEST_FIT (1)
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Define to 1 to make every allocation use its own, dedicated VkDeviceMemory object. For debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all suballocations, in bytes. For debugging purposes only.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin between suballocations, in bytes. For debugging purposes only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Define to 1 to enable a single mutex protecting all entry calls to the library. For debugging purposes only.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity. For debugging purposes only.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    // Maximum size of a Vulkan memory heap to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    // Default size of a block allocated as a single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE
    // Default size of a block allocated as a single VkDeviceMemory from a "small" heap.
    #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024)
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

/*******************************************************************************
END OF CONFIGURATION
*/

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };

// Returns the number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}

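// The function above is the classic parallel ("SWAR") popcount: each step adds
// neighboring bit-count fields of twice the previous width. For example, for
// v = 0b1011 the first step produces the 2-bit counts 01 and 10 (one bit set in
// the high pair, two in the low pair), and the following steps sum them to 3.
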
// Aligns the given value up to the nearest multiple of align. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Division with mathematical rounding to the nearest number.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}

#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT

/*
Returns true if two memory blocks occupy overlapping pages.
ResourceA must be at a lower memory offset than ResourceB.

The algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}

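// Worked example: with pageSize = 4096, a resource A occupying bytes [0, 4000)
// ends on page 0 and a resource B starting at offset 4096 starts on page 1, so
// the function returns false. If B instead started at offset 4000, both would
// touch page 0 and the function would return true.
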
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};

/*
Returns true if the given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is a buffer
or linear image and the other one is an optimal image. If a type is unknown,
behave conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}

// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    {
        if(m_pMutex)
        {
            m_pMutex->Lock();
        }
    }

    ~VmaMutexLock()
    {
        if(m_pMutex)
        {
            m_pMutex->Unlock();
        }
    }

private:
    VMA_MUTEX* m_pMutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the free suballocation collection.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;

/*
Performs a binary search and returns an iterator to the first element that is
greater than or equal to (key), according to comparison (cmp).

Cmp should return true if its first argument is less than its second argument.

The returned iterator points to the found element, if it is present in the
collection, or to the place where a new element with value (key) should be
inserted.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}

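/*
Example (illustrative sketch): lower-bound lookup in a sorted array.

    const int arr[] = { 1, 3, 3, 7 };
    const int* it = VmaBinaryFindFirstNotLess(
        arr, arr + 4, 3, [](int a, int b) { return a < b; });
    // it now points to arr[1], the first element that is not less than 3.
*/
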
// Memory allocation

static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}

template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}

// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};

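/*
Example (illustrative sketch): VmaStlAllocator routes a standard container's
storage through the VkAllocationCallbacks given to the library.

    VmaStlAllocator<int> alloc(pAllocationCallbacks); // or VMA_NULL for the system heap
    std::vector<int, VmaStlAllocator<int> > v(alloc);
    v.push_back(42);
*/
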
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};

template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR

template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}

template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* const it = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        comparator);
    if((it != vector.data() + vector.size()) && !comparator(*it, value) && !comparator(value, *it))
    {
        return it - vector.data();
    }
    else
    {
        return vector.size();
    }
}

// class VmaPoolAllocator

/*
Allocator for objects of type T, using a list of arrays (pools) to speed up
allocation. The number of elements that can be allocated is not bounded,
because the allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    ~VmaPoolAllocator();
    void Clear();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    size_t m_ItemsPerBlock;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}

template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}

template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use the first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            return &pItem->Value;
        }
    }

    // No block has a free item: Create a new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in the address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}

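/*
Example (illustrative sketch): the library uses this pool allocator for small,
frequently created objects, e.g. the list nodes of VmaRawList below.

    VmaPoolAllocator<int> intAllocator(pAllocationCallbacks, 128);
    int* p = intAllocator.Alloc(); // Amortized O(1); one heap allocation per 128 items.
    intAllocator.Free(p);
*/
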
// class VmaRawList, VmaList

#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;

    // Declared but not defined, to prevent copy construction and assignment.
    VmaRawList(const VmaRawList<T>& src);
    VmaRawList<T>& operator=(const VmaRawList<T>& rhs);
};

template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computation only to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T, typename AllocatorT>
class VmaList
{
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST

// class VmaMap

// Unused in this version.
#if 0

#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP

#endif // #if 0

class VmaDeviceMemoryBlock;

struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment(1),
        m_Size(0),
        m_pUserData(VMA_NULL),
        m_LastUseFrameIndex(currentFrameIndex),
        m_Type((uint8_t)ALLOCATION_TYPE_NONE),
        m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
        m_MapCount(0),
        m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
    {
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    void InitBlockAllocation(
        VmaPool hPool,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_hPool = hPool;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset)
    {
        VMA_ASSERT(block != VMA_NULL);
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
    }

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const;
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;
    VmaPool GetPool() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.

    If hAllocation is already lost, assert - you should not call it then.
    If hAllocation was not created with CAN_BECOME_LOST_BIT, assert.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F, used only when ALLOCATION_TYPE_DEDICATED, are a reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaPool m_hPool; // Null if belongs to general memory.
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        uint32_t m_MemoryTypeIndex;
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        // Allocation out of VmaDeviceMemoryBlock.
        BlockAllocation m_BlockAllocation;
        // Allocation for an object that has its own private VkDeviceMemory.
        DedicatedAllocation m_DedicatedAllocation;
    };

    void FreeUserDataString(VmaAllocator hAllocator);
};

3370 /*
3371 Represents a region of VmaDeviceMemoryBlock that is either assigned to a
3372 VmaAllocation and returned as allocated memory, or free.
3373 */
3374 struct VmaSuballocation
3375 {
3376  VkDeviceSize offset;
3377  VkDeviceSize size;
3378  VmaAllocation hAllocation;
3379  VmaSuballocationType type;
3380 };
3381 
3382 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
3383 
3384 // Cost of making one additional allocation lost, expressed in bytes.
3385 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3386 
3387 /*
3388 Parameters of planned allocation inside a VmaDeviceMemoryBlock.
3389 
3390 If canMakeOtherLost was false:
3391 - item points to a FREE suballocation.
3392 - itemsToMakeLostCount is 0.
3393 
3394 If canMakeOtherLost was true:
3395 - item points to first of sequence of suballocations, which are either FREE,
3396  or point to VmaAllocations that can become lost.
3397 - itemsToMakeLostCount is the number of VmaAllocations that need to be made lost for
3398  the requested allocation to succeed.
3399 */
3400 struct VmaAllocationRequest
3401 {
3402  VkDeviceSize offset;
3403  VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
3404  VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
3405  VmaSuballocationList::iterator item;
3406  size_t itemsToMakeLostCount;
3407 
3408  VkDeviceSize CalcCost() const
3409  {
3410  return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3411  }
3412 };
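/*
Illustrative worked example (not part of the original source): CalcCost() turns
an allocation request into "equivalent bytes". Evicting one 262144-byte
allocation costs 262144 + 1 * VMA_LOST_ALLOCATION_COST = 1310720, while a
request satisfied purely from FREE suballocations costs 0, so the cheaper
candidate is preferred when requests are compared in CreateAllocationRequest().
*/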
3413 
3414 /*
3415 Data structure used for bookkeeping of allocations and unused ranges of memory
3416 in a single VkDeviceMemory block.
3417 */
3418 class VmaBlockMetadata
3419 {
3420 public:
3421  VmaBlockMetadata(VmaAllocator hAllocator);
3422  ~VmaBlockMetadata();
3423  void Init(VkDeviceSize size);
3424 
3425  // Validates all data structures inside this object. If not valid, returns false.
3426  bool Validate() const;
3427  VkDeviceSize GetSize() const { return m_Size; }
3428  size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
3429  VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
3430  VkDeviceSize GetUnusedRangeSizeMax() const;
3431  // Returns true if this block is empty - contains only a single free suballocation.
3432  bool IsEmpty() const;
3433 
3434  void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
3435  void AddPoolStats(VmaPoolStats& inoutStats) const;
3436 
3437 #if VMA_STATS_STRING_ENABLED
3438  void PrintDetailedMap(class VmaJsonWriter& json) const;
3439 #endif
3440 
3441  // Creates a trivial request for the case when the block is empty.
3442  void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
3443 
3444  // Tries to find a place for suballocation with given parameters inside this block.
3445  // If succeeded, fills pAllocationRequest and returns true.
3446  // If failed, returns false.
3447  bool CreateAllocationRequest(
3448  uint32_t currentFrameIndex,
3449  uint32_t frameInUseCount,
3450  VkDeviceSize bufferImageGranularity,
3451  VkDeviceSize allocSize,
3452  VkDeviceSize allocAlignment,
3453  VmaSuballocationType allocType,
3454  bool canMakeOtherLost,
3455  VmaAllocationRequest* pAllocationRequest);
3456 
3457  bool MakeRequestedAllocationsLost(
3458  uint32_t currentFrameIndex,
3459  uint32_t frameInUseCount,
3460  VmaAllocationRequest* pAllocationRequest);
3461 
3462  uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3463 
3464  // Makes actual allocation based on request. Request must already be checked and valid.
3465  void Alloc(
3466  const VmaAllocationRequest& request,
3467  VmaSuballocationType type,
3468  VkDeviceSize allocSize,
3469  VmaAllocation hAllocation);
3470 
3471  // Frees suballocation assigned to given memory region.
3472  void Free(const VmaAllocation allocation);
3473 
3474 private:
3475  VkDeviceSize m_Size;
3476  uint32_t m_FreeCount;
3477  VkDeviceSize m_SumFreeSize;
3478  VmaSuballocationList m_Suballocations;
3479  // Suballocations that are free and have size greater than a certain threshold.
3480  // Sorted by size, ascending.
3481  VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3482 
3483  bool ValidateFreeSuballocationList() const;
3484 
3485  // Checks if a requested suballocation with given parameters can be placed at given suballocItem.
3486  // If yes, fills pOffset and returns true. If no, returns false.
3487  bool CheckAllocation(
3488  uint32_t currentFrameIndex,
3489  uint32_t frameInUseCount,
3490  VkDeviceSize bufferImageGranularity,
3491  VkDeviceSize allocSize,
3492  VkDeviceSize allocAlignment,
3493  VmaSuballocationType allocType,
3494  VmaSuballocationList::const_iterator suballocItem,
3495  bool canMakeOtherLost,
3496  VkDeviceSize* pOffset,
3497  size_t* itemsToMakeLostCount,
3498  VkDeviceSize* pSumFreeSize,
3499  VkDeviceSize* pSumItemSize) const;
3500  // Given a free suballocation, merges it with the following one, which must also be free.
3501  void MergeFreeWithNext(VmaSuballocationList::iterator item);
3502  // Releases given suballocation, making it free.
3503  // Merges it with adjacent free suballocations if applicable.
3504  // Returns iterator to new free suballocation at this place.
3505  VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3506  // Given a free suballocation, inserts it into the sorted list
3507  // m_FreeSuballocationsBySize if its size qualifies for registration.
3508  void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3509  // Given a free suballocation, removes it from the sorted list
3510  // m_FreeSuballocationsBySize if it was registered there.
3511  void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3512 };
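/*
Usage sketch (illustrative only; caller-side variables are hypothetical): the
typical sequence a block owner performs to suballocate from this metadata.

    VmaBlockMetadata metadata(hAllocator);
    metadata.Init(blockSize);

    VmaAllocationRequest request;
    if(metadata.CreateAllocationRequest(
        currentFrameIndex,
        frameInUseCount,
        bufferImageGranularity,
        allocSize,
        allocAlignment,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        false, // canMakeOtherLost
        &request))
    {
        metadata.Alloc(request, VMA_SUBALLOCATION_TYPE_BUFFER, allocSize, hAllocation);
        // ... the caller now owns [request.offset, request.offset + allocSize) ...
        metadata.Free(hAllocation);
    }
*/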
3513 
3514 // Helper class that represents mapped memory. Synchronized internally.
3515 class VmaDeviceMemoryMapping
3516 {
3517 public:
3518  VmaDeviceMemoryMapping();
3519  ~VmaDeviceMemoryMapping();
3520 
3521  void* GetMappedData() const { return m_pMappedData; }
3522 
3523  // ppData can be null.
3524  VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void **ppData);
3525  void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory);
3526 
3527 private:
3528  VMA_MUTEX m_Mutex;
3529  uint32_t m_MapCount;
3530  void* m_pMappedData;
3531 };
3532 
3533 /*
3534 Represents a single block of device memory (`VkDeviceMemory`) with all the
3535 data about its regions (aka suballocations, `VmaAllocation`), assigned and free.
3536 
3537 Thread-safety: This class must be externally synchronized.
3538 */
3539 class VmaDeviceMemoryBlock
3540 {
3541 public:
3542  uint32_t m_MemoryTypeIndex;
3543  VkDeviceMemory m_hMemory;
3544  VmaDeviceMemoryMapping m_Mapping;
3545  VmaBlockMetadata m_Metadata;
3546 
3547  VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3548 
3549  ~VmaDeviceMemoryBlock()
3550  {
3551  VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3552  }
3553 
3554  // Always call after construction.
3555  void Init(
3556  uint32_t newMemoryTypeIndex,
3557  VkDeviceMemory newMemory,
3558  VkDeviceSize newSize);
3559  // Always call before destruction.
3560  void Destroy(VmaAllocator allocator);
3561 
3562  // Validates all data structures inside this object. If not valid, returns false.
3563  bool Validate() const;
3564 
3565  // ppData can be null.
3566  VkResult Map(VmaAllocator hAllocator, void** ppData);
3567  void Unmap(VmaAllocator hAllocator);
3568 };
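/*
Usage sketch (illustrative only; assumes the vma_new()/vma_delete() helpers
defined elsewhere in this file): lifecycle of a block as driven by its owner.
Init() adopts an already-allocated VkDeviceMemory, Map()/Unmap() reference-count
the mapping through m_Mapping, and Destroy() must run before the destructor,
which asserts that the memory was already released.

    VmaDeviceMemoryBlock* pBlock = vma_new(hAllocator, VmaDeviceMemoryBlock)(hAllocator);
    pBlock->Init(memTypeIndex, hMemory, blockSize);
    void* pData = VMA_NULL;
    if(pBlock->Map(hAllocator, &pData) == VK_SUCCESS)
    {
        // ... read/write through pData ...
        pBlock->Unmap(hAllocator);
    }
    pBlock->Destroy(hAllocator); // Frees the underlying VkDeviceMemory.
    vma_delete(hAllocator, pBlock);
*/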
3569 
3570 struct VmaPointerLess
3571 {
3572  bool operator()(const void* lhs, const void* rhs) const
3573  {
3574  return lhs < rhs;
3575  }
3576 };
3577 
3578 class VmaDefragmentator;
3579 
3580 /*
3581 Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
3582 Vulkan memory type.
3583 
3584 Synchronized internally with a mutex.
3585 */
3586 struct VmaBlockVector
3587 {
3588  VmaBlockVector(
3589  VmaAllocator hAllocator,
3590  uint32_t memoryTypeIndex,
3591  VkDeviceSize preferredBlockSize,
3592  size_t minBlockCount,
3593  size_t maxBlockCount,
3594  VkDeviceSize bufferImageGranularity,
3595  uint32_t frameInUseCount,
3596  bool isCustomPool);
3597  ~VmaBlockVector();
3598 
3599  VkResult CreateMinBlocks();
3600 
3601  uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
3602  VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
3603  VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
3604  uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
3605 
3606  void GetPoolStats(VmaPoolStats* pStats);
3607 
3608  bool IsEmpty() const { return m_Blocks.empty(); }
3609 
3610  VkResult Allocate(
3611  VmaPool hCurrentPool,
3612  uint32_t currentFrameIndex,
3613  const VkMemoryRequirements& vkMemReq,
3614  const VmaAllocationCreateInfo& createInfo,
3615  VmaSuballocationType suballocType,
3616  VmaAllocation* pAllocation);
3617 
3618  void Free(
3619  VmaAllocation hAllocation);
3620 
3621  // Adds statistics of this BlockVector to pStats.
3622  void AddStats(VmaStats* pStats);
3623 
3624 #if VMA_STATS_STRING_ENABLED
3625  void PrintDetailedMap(class VmaJsonWriter& json);
3626 #endif
3627 
3628  void MakePoolAllocationsLost(
3629  uint32_t currentFrameIndex,
3630  size_t* pLostAllocationCount);
3631 
3632  VmaDefragmentator* EnsureDefragmentator(
3633  VmaAllocator hAllocator,
3634  uint32_t currentFrameIndex);
3635 
3636  VkResult Defragment(
3637  VmaDefragmentationStats* pDefragmentationStats,
3638  VkDeviceSize& maxBytesToMove,
3639  uint32_t& maxAllocationsToMove);
3640 
3641  void DestroyDefragmentator();
3642 
3643 private:
3644  friend class VmaDefragmentator;
3645 
3646  const VmaAllocator m_hAllocator;
3647  const uint32_t m_MemoryTypeIndex;
3648  const VkDeviceSize m_PreferredBlockSize;
3649  const size_t m_MinBlockCount;
3650  const size_t m_MaxBlockCount;
3651  const VkDeviceSize m_BufferImageGranularity;
3652  const uint32_t m_FrameInUseCount;
3653  const bool m_IsCustomPool;
3654  VMA_MUTEX m_Mutex;
3655  // Incrementally sorted by sumFreeSize, ascending.
3656  VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3657  /* There can be at most one block that is completely empty - a
3658  hysteresis to avoid the pessimistic case of alternating creation and
3659  destruction of a VkDeviceMemory. */
3660  bool m_HasEmptyBlock;
3661  VmaDefragmentator* m_pDefragmentator;
3662 
3663  // Finds and removes given block from vector.
3664  void Remove(VmaDeviceMemoryBlock* pBlock);
3665 
3666  // Performs single step in sorting m_Blocks. They may not be fully sorted
3667  // after this call.
3668  void IncrementallySortBlocks();
3669 
3670  VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
3671 };
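/*
Usage sketch (illustrative; passing VK_NULL_HANDLE as hCurrentPool for a
default, non-custom pool is an assumption): how the allocator drives a block
vector for one memory type.

    VmaAllocation hAllocation = VK_NULL_HANDLE;
    VkResult res = pBlockVector->Allocate(
        VK_NULL_HANDLE, // hCurrentPool
        currentFrameIndex,
        vkMemReq,
        allocCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        &hAllocation);
    if(res == VK_SUCCESS)
    {
        // ...
        pBlockVector->Free(hAllocation);
    }
*/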
3672 
3673 struct VmaPool_T
3674 {
3675 public:
3676  VmaBlockVector m_BlockVector;
3677 
3678  // Takes ownership.
3679  VmaPool_T(
3680  VmaAllocator hAllocator,
3681  const VmaPoolCreateInfo& createInfo);
3682  ~VmaPool_T();
3683 
3684  VmaBlockVector& GetBlockVector() { return m_BlockVector; }
3685 
3686 #if VMA_STATS_STRING_ENABLED
3687  //void PrintDetailedMap(class VmaStringBuilder& sb);
3688 #endif
3689 };
3690 
3691 class VmaDefragmentator
3692 {
3693  const VmaAllocator m_hAllocator;
3694  VmaBlockVector* const m_pBlockVector;
3695  uint32_t m_CurrentFrameIndex;
3696  VkDeviceSize m_BytesMoved;
3697  uint32_t m_AllocationsMoved;
3698 
3699  struct AllocationInfo
3700  {
3701  VmaAllocation m_hAllocation;
3702  VkBool32* m_pChanged;
3703 
3704  AllocationInfo() :
3705  m_hAllocation(VK_NULL_HANDLE),
3706  m_pChanged(VMA_NULL)
3707  {
3708  }
3709  };
3710 
3711  struct AllocationInfoSizeGreater
3712  {
3713  bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
3714  {
3715  return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3716  }
3717  };
3718 
3719  // Used between AddAllocation and Defragment.
3720  VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3721 
3722  struct BlockInfo
3723  {
3724  VmaDeviceMemoryBlock* m_pBlock;
3725  bool m_HasNonMovableAllocations;
3726  VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3727 
3728  BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
3729  m_pBlock(VMA_NULL),
3730  m_HasNonMovableAllocations(true),
3731  m_Allocations(pAllocationCallbacks),
3732  m_pMappedDataForDefragmentation(VMA_NULL)
3733  {
3734  }
3735 
3736  void CalcHasNonMovableAllocations()
3737  {
3738  const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3739  const size_t defragmentAllocCount = m_Allocations.size();
3740  m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3741  }
3742 
3743  void SortAllocationsBySizeDescecnding()
3744  {
3745  VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3746  }
3747 
3748  VkResult EnsureMapping(VmaAllocator hAllocator, void** ppMappedData);
3749  void Unmap(VmaAllocator hAllocator);
3750 
3751  private:
3752  // Not null if the block was mapped for defragmentation only, i.e. it was not originally mapped.
3753  void* m_pMappedDataForDefragmentation;
3754  };
3755 
3756  struct BlockPointerLess
3757  {
3758  bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
3759  {
3760  return pLhsBlockInfo->m_pBlock < pRhsBlock;
3761  }
3762  bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
3763  {
3764  return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
3765  }
3766  };
3767 
3768  // 1. Blocks with some non-movable allocations go first.
3769  // 2. Blocks with smaller sumFreeSize go first.
3770  struct BlockInfoCompareMoveDestination
3771  {
3772  bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
3773  {
3774  if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3775  {
3776  return true;
3777  }
3778  if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3779  {
3780  return false;
3781  }
3782  if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3783  {
3784  return true;
3785  }
3786  return false;
3787  }
3788  };
3789 
3790  typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3791  BlockInfoVector m_Blocks;
3792 
3793  VkResult DefragmentRound(
3794  VkDeviceSize maxBytesToMove,
3795  uint32_t maxAllocationsToMove);
3796 
3797  static bool MoveMakesSense(
3798  size_t dstBlockIndex, VkDeviceSize dstOffset,
3799  size_t srcBlockIndex, VkDeviceSize srcOffset);
3800 
3801 public:
3802  VmaDefragmentator(
3803  VmaAllocator hAllocator,
3804  VmaBlockVector* pBlockVector,
3805  uint32_t currentFrameIndex);
3806 
3807  ~VmaDefragmentator();
3808 
3809  VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
3810  uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
3811 
3812  void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3813 
3814  VkResult Defragment(
3815  VkDeviceSize maxBytesToMove,
3816  uint32_t maxAllocationsToMove);
3817 };
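/*
Usage sketch (illustrative only): registering allocations and running one
defragmentation pass through the owning block vector.

    VmaDefragmentator* pDefrag = pBlockVector->EnsureDefragmentator(hAllocator, currentFrameIndex);
    VkBool32 changed = VK_FALSE;
    pDefrag->AddAllocation(hAlloc, &changed);
    VkResult res = pDefrag->Defragment(maxBytesToMove, maxAllocationsToMove);
    // On success, GetBytesMoved()/GetAllocationsMoved() report the work done
    // and changed is set to VK_TRUE if hAlloc was relocated.
    pBlockVector->DestroyDefragmentator();
*/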
3818 
3819 // Main allocator object.
3820 struct VmaAllocator_T
3821 {
3822  bool m_UseMutex;
3823  bool m_UseKhrDedicatedAllocation;
3824  VkDevice m_hDevice;
3825  bool m_AllocationCallbacksSpecified;
3826  VkAllocationCallbacks m_AllocationCallbacks;
3827  VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
3828 
3829  // Number of bytes still free out of the limit, or VK_WHOLE_SIZE if no limit is set for that heap.
3830  VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3831  VMA_MUTEX m_HeapSizeLimitMutex;
3832 
3833  VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3834  VkPhysicalDeviceMemoryProperties m_MemProps;
3835 
3836  // Default pools.
3837  VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
3838 
3839  // Each vector is sorted by memory (handle value).
3840  typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3841  AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
3842  VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
3843 
3844  VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
3845  ~VmaAllocator_T();
3846 
3847  const VkAllocationCallbacks* GetAllocationCallbacks() const
3848  {
3849  return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3850  }
3851  const VmaVulkanFunctions& GetVulkanFunctions() const
3852  {
3853  return m_VulkanFunctions;
3854  }
3855 
3856  VkDeviceSize GetBufferImageGranularity() const
3857  {
3858  return VMA_MAX(
3859  static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3860  m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3861  }
3862 
3863  uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
3864  uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }
3865 
3866  uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
3867  {
3868  VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3869  return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3870  }
3871 
3872  void GetBufferMemoryRequirements(
3873  VkBuffer hBuffer,
3874  VkMemoryRequirements& memReq,
3875  bool& requiresDedicatedAllocation,
3876  bool& prefersDedicatedAllocation) const;
3877  void GetImageMemoryRequirements(
3878  VkImage hImage,
3879  VkMemoryRequirements& memReq,
3880  bool& requiresDedicatedAllocation,
3881  bool& prefersDedicatedAllocation) const;
3882 
3883  // Main allocation function.
3884  VkResult AllocateMemory(
3885  const VkMemoryRequirements& vkMemReq,
3886  bool requiresDedicatedAllocation,
3887  bool prefersDedicatedAllocation,
3888  VkBuffer dedicatedBuffer,
3889  VkImage dedicatedImage,
3890  const VmaAllocationCreateInfo& createInfo,
3891  VmaSuballocationType suballocType,
3892  VmaAllocation* pAllocation);
3893 
3894  // Main deallocation function.
3895  void FreeMemory(const VmaAllocation allocation);
3896 
3897  void CalculateStats(VmaStats* pStats);
3898 
3899 #if VMA_STATS_STRING_ENABLED
3900  void PrintDetailedMap(class VmaJsonWriter& json);
3901 #endif
3902 
3903  VkResult Defragment(
3904  VmaAllocation* pAllocations,
3905  size_t allocationCount,
3906  VkBool32* pAllocationsChanged,
3907  const VmaDefragmentationInfo* pDefragmentationInfo,
3908  VmaDefragmentationStats* pDefragmentationStats);
3909 
3910  void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
3911 
3912  VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
3913  void DestroyPool(VmaPool pool);
3914  void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);
3915 
3916  void SetCurrentFrameIndex(uint32_t frameIndex);
3917 
3918  void MakePoolAllocationsLost(
3919  VmaPool hPool,
3920  size_t* pLostAllocationCount);
3921 
3922  void CreateLostAllocation(VmaAllocation* pAllocation);
3923 
3924  VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3925  void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3926 
3927  VkResult Map(VmaAllocation hAllocation, void** ppData);
3928  void Unmap(VmaAllocation hAllocation);
3929 
3930 private:
3931  VkDeviceSize m_PreferredLargeHeapBlockSize;
3932  VkDeviceSize m_PreferredSmallHeapBlockSize;
3933 
3934  VkPhysicalDevice m_PhysicalDevice;
3935  VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3936 
3937  VMA_MUTEX m_PoolsMutex;
3938  // Protected by m_PoolsMutex. Sorted by pointer value.
3939  VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3940 
3941  VmaVulkanFunctions m_VulkanFunctions;
3942 
3943  void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);
3944 
3945  VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
3946 
3947  VkResult AllocateMemoryOfType(
3948  const VkMemoryRequirements& vkMemReq,
3949  bool dedicatedAllocation,
3950  VkBuffer dedicatedBuffer,
3951  VkImage dedicatedImage,
3952  const VmaAllocationCreateInfo& createInfo,
3953  uint32_t memTypeIndex,
3954  VmaSuballocationType suballocType,
3955  VmaAllocation* pAllocation);
3956 
3957  // Allocates and registers new VkDeviceMemory dedicated to a single allocation.
3958  VkResult AllocateDedicatedMemory(
3959  VkDeviceSize size,
3960  VmaSuballocationType suballocType,
3961  uint32_t memTypeIndex,
3962  bool map,
3963  bool isUserDataString,
3964  void* pUserData,
3965  VkBuffer dedicatedBuffer,
3966  VkImage dedicatedImage,
3967  VmaAllocation* pAllocation);
3968 
3969  // Frees given allocation, created as dedicated memory - unregisters it and releases its VkDeviceMemory.
3970  void FreeDedicatedMemory(VmaAllocation allocation);
3971 };
3972 
3974 // Memory allocation #2 after VmaAllocator_T definition
3975 
3976 static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
3977 {
3978  return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3979 }
3980 
3981 static void VmaFree(VmaAllocator hAllocator, void* ptr)
3982 {
3983  VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
3984 }
3985 
3986 template<typename T>
3987 static T* VmaAllocate(VmaAllocator hAllocator)
3988 {
3989  return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
3990 }
3991 
3992 template<typename T>
3993 static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
3994 {
3995  return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
3996 }
3997 
3998 template<typename T>
3999 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4000 {
4001  if(ptr != VMA_NULL)
4002  {
4003  ptr->~T();
4004  VmaFree(hAllocator, ptr);
4005  }
4006 }
4007 
4008 template<typename T>
4009 static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
4010 {
4011  if(ptr != VMA_NULL)
4012  {
4013  for(size_t i = count; i--; )
4014  ptr[i].~T();
4015  VmaFree(hAllocator, ptr);
4016  }
4017 }
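/*
Usage sketch (illustrative): pairing the helpers above. VmaAllocateArray() only
allocates raw storage - it does not run constructors - while vma_delete_array()
runs destructors in reverse order before freeing, so it is simplest with
trivially-constructible types:

    uint32_t* pNumbers = VmaAllocateArray<uint32_t>(hAllocator, 16);
    // ... fill and use pNumbers[0..15] ...
    vma_delete_array(hAllocator, pNumbers, 16);
*/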
4018 
4020 // VmaStringBuilder
4021 
4022 #if VMA_STATS_STRING_ENABLED
4023 
4024 class VmaStringBuilder
4025 {
4026 public:
4027  VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4028  size_t GetLength() const { return m_Data.size(); }
4029  const char* GetData() const { return m_Data.data(); }
4030 
4031  void Add(char ch) { m_Data.push_back(ch); }
4032  void Add(const char* pStr);
4033  void AddNewLine() { Add('\n'); }
4034  void AddNumber(uint32_t num);
4035  void AddNumber(uint64_t num);
4036  void AddPointer(const void* ptr);
4037 
4038 private:
4039  VmaVector< char, VmaStlAllocator<char> > m_Data;
4040 };
4041 
4042 void VmaStringBuilder::Add(const char* pStr)
4043 {
4044  const size_t strLen = strlen(pStr);
4045  if(strLen > 0)
4046  {
4047  const size_t oldCount = m_Data.size();
4048  m_Data.resize(oldCount + strLen);
4049  memcpy(m_Data.data() + oldCount, pStr, strLen);
4050  }
4051 }
4052 
4053 void VmaStringBuilder::AddNumber(uint32_t num)
4054 {
4055  char buf[11];
4056  VmaUint32ToStr(buf, sizeof(buf), num);
4057  Add(buf);
4058 }
4059 
4060 void VmaStringBuilder::AddNumber(uint64_t num)
4061 {
4062  char buf[21];
4063  VmaUint64ToStr(buf, sizeof(buf), num);
4064  Add(buf);
4065 }
4066 
4067 void VmaStringBuilder::AddPointer(const void* ptr)
4068 {
4069  char buf[21];
4070  VmaPtrToStr(buf, sizeof(buf), ptr);
4071  Add(buf);
4072 }
4073 
4074 #endif // #if VMA_STATS_STRING_ENABLED
4075 
4077 // VmaJsonWriter
4078 
4079 #if VMA_STATS_STRING_ENABLED
4080 
4081 class VmaJsonWriter
4082 {
4083 public:
4084  VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4085  ~VmaJsonWriter();
4086 
4087  void BeginObject(bool singleLine = false);
4088  void EndObject();
4089 
4090  void BeginArray(bool singleLine = false);
4091  void EndArray();
4092 
4093  void WriteString(const char* pStr);
4094  void BeginString(const char* pStr = VMA_NULL);
4095  void ContinueString(const char* pStr);
4096  void ContinueString(uint32_t n);
4097  void ContinueString(uint64_t n);
4098  void ContinueString_Pointer(const void* ptr);
4099  void EndString(const char* pStr = VMA_NULL);
4100 
4101  void WriteNumber(uint32_t n);
4102  void WriteNumber(uint64_t n);
4103  void WriteBool(bool b);
4104  void WriteNull();
4105 
4106 private:
4107  static const char* const INDENT;
4108 
4109  enum COLLECTION_TYPE
4110  {
4111  COLLECTION_TYPE_OBJECT,
4112  COLLECTION_TYPE_ARRAY,
4113  };
4114  struct StackItem
4115  {
4116  COLLECTION_TYPE type;
4117  uint32_t valueCount;
4118  bool singleLineMode;
4119  };
4120 
4121  VmaStringBuilder& m_SB;
4122  VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4123  bool m_InsideString;
4124 
4125  void BeginValue(bool isString);
4126  void WriteIndent(bool oneLess = false);
4127 };
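/*
Usage sketch (illustrative only): emitting {"Count": 2, "Names": ["a", "b"]}.
Keys and values alternate inside an object; BeginValue() below asserts that
every even-positioned value in an object is a string, i.e. a key.

    VmaStringBuilder sb(hAllocator);
    {
        VmaJsonWriter json(hAllocator->GetAllocationCallbacks(), sb);
        json.BeginObject();
        json.WriteString("Count");
        json.WriteNumber(2u);
        json.WriteString("Names");
        json.BeginArray(true); // single-line array
        json.WriteString("a");
        json.WriteString("b");
        json.EndArray();
        json.EndObject();
    } // ~VmaJsonWriter() asserts that all objects/arrays were closed.
    // sb.GetData()/sb.GetLength() now hold the resulting JSON text.
*/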
4128 
4129 const char* const VmaJsonWriter::INDENT = " ";
4130 
4131 VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4132  m_SB(sb),
4133  m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4134  m_InsideString(false)
4135 {
4136 }
4137 
4138 VmaJsonWriter::~VmaJsonWriter()
4139 {
4140  VMA_ASSERT(!m_InsideString);
4141  VMA_ASSERT(m_Stack.empty());
4142 }
4143 
4144 void VmaJsonWriter::BeginObject(bool singleLine)
4145 {
4146  VMA_ASSERT(!m_InsideString);
4147 
4148  BeginValue(false);
4149  m_SB.Add('{');
4150 
4151  StackItem item;
4152  item.type = COLLECTION_TYPE_OBJECT;
4153  item.valueCount = 0;
4154  item.singleLineMode = singleLine;
4155  m_Stack.push_back(item);
4156 }
4157 
4158 void VmaJsonWriter::EndObject()
4159 {
4160  VMA_ASSERT(!m_InsideString);
4161 
4162  WriteIndent(true);
4163  m_SB.Add('}');
4164 
4165  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4166  m_Stack.pop_back();
4167 }
4168 
4169 void VmaJsonWriter::BeginArray(bool singleLine)
4170 {
4171  VMA_ASSERT(!m_InsideString);
4172 
4173  BeginValue(false);
4174  m_SB.Add('[');
4175 
4176  StackItem item;
4177  item.type = COLLECTION_TYPE_ARRAY;
4178  item.valueCount = 0;
4179  item.singleLineMode = singleLine;
4180  m_Stack.push_back(item);
4181 }
4182 
4183 void VmaJsonWriter::EndArray()
4184 {
4185  VMA_ASSERT(!m_InsideString);
4186 
4187  WriteIndent(true);
4188  m_SB.Add(']');
4189 
4190  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4191  m_Stack.pop_back();
4192 }
4193 
4194 void VmaJsonWriter::WriteString(const char* pStr)
4195 {
4196  BeginString(pStr);
4197  EndString();
4198 }
4199 
4200 void VmaJsonWriter::BeginString(const char* pStr)
4201 {
4202  VMA_ASSERT(!m_InsideString);
4203 
4204  BeginValue(true);
4205  m_SB.Add('"');
4206  m_InsideString = true;
4207  if(pStr != VMA_NULL && pStr[0] != '\0')
4208  {
4209  ContinueString(pStr);
4210  }
4211 }
4212 
4213 void VmaJsonWriter::ContinueString(const char* pStr)
4214 {
4215  VMA_ASSERT(m_InsideString);
4216 
4217  const size_t strLen = strlen(pStr);
4218  for(size_t i = 0; i < strLen; ++i)
4219  {
4220  char ch = pStr[i];
4221  if(ch == '\\')
4222  {
4223  m_SB.Add("\\\\");
4224  }
4225  else if(ch == '"')
4226  {
4227  m_SB.Add("\\\"");
4228  }
4229  else if(ch >= 32)
4230  {
4231  m_SB.Add(ch);
4232  }
4233  else switch(ch)
4234  {
4235  case '\b':
4236  m_SB.Add("\\b");
4237  break;
4238  case '\f':
4239  m_SB.Add("\\f");
4240  break;
4241  case '\n':
4242  m_SB.Add("\\n");
4243  break;
4244  case '\r':
4245  m_SB.Add("\\r");
4246  break;
4247  case '\t':
4248  m_SB.Add("\\t");
4249  break;
4250  default:
4251  VMA_ASSERT(0 && "Character not currently supported.");
4252  break;
4253  }
4254  }
4255 }
4256 
4257 void VmaJsonWriter::ContinueString(uint32_t n)
4258 {
4259  VMA_ASSERT(m_InsideString);
4260  m_SB.AddNumber(n);
4261 }
4262 
4263 void VmaJsonWriter::ContinueString(uint64_t n)
4264 {
4265  VMA_ASSERT(m_InsideString);
4266  m_SB.AddNumber(n);
4267 }
4268 
4269 void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
4270 {
4271  VMA_ASSERT(m_InsideString);
4272  m_SB.AddPointer(ptr);
4273 }
4274 
4275 void VmaJsonWriter::EndString(const char* pStr)
4276 {
4277  VMA_ASSERT(m_InsideString);
4278  if(pStr != VMA_NULL && pStr[0] != '\0')
4279  {
4280  ContinueString(pStr);
4281  }
4282  m_SB.Add('"');
4283  m_InsideString = false;
4284 }
4285 
4286 void VmaJsonWriter::WriteNumber(uint32_t n)
4287 {
4288  VMA_ASSERT(!m_InsideString);
4289  BeginValue(false);
4290  m_SB.AddNumber(n);
4291 }
4292 
4293 void VmaJsonWriter::WriteNumber(uint64_t n)
4294 {
4295  VMA_ASSERT(!m_InsideString);
4296  BeginValue(false);
4297  m_SB.AddNumber(n);
4298 }
4299 
4300 void VmaJsonWriter::WriteBool(bool b)
4301 {
4302  VMA_ASSERT(!m_InsideString);
4303  BeginValue(false);
4304  m_SB.Add(b ? "true" : "false");
4305 }
4306 
4307 void VmaJsonWriter::WriteNull()
4308 {
4309  VMA_ASSERT(!m_InsideString);
4310  BeginValue(false);
4311  m_SB.Add("null");
4312 }
4313 
4314 void VmaJsonWriter::BeginValue(bool isString)
4315 {
4316  if(!m_Stack.empty())
4317  {
4318  StackItem& currItem = m_Stack.back();
4319  if(currItem.type == COLLECTION_TYPE_OBJECT &&
4320  currItem.valueCount % 2 == 0)
4321  {
4322  VMA_ASSERT(isString);
4323  }
4324 
4325  if(currItem.type == COLLECTION_TYPE_OBJECT &&
4326  currItem.valueCount % 2 != 0)
4327  {
4328  m_SB.Add(": ");
4329  }
4330  else if(currItem.valueCount > 0)
4331  {
4332  m_SB.Add(", ");
4333  WriteIndent();
4334  }
4335  else
4336  {
4337  WriteIndent();
4338  }
4339  ++currItem.valueCount;
4340  }
4341 }
4342 
4343 void VmaJsonWriter::WriteIndent(bool oneLess)
4344 {
4345  if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4346  {
4347  m_SB.AddNewLine();
4348 
4349  size_t count = m_Stack.size();
4350  if(count > 0 && oneLess)
4351  {
4352  --count;
4353  }
4354  for(size_t i = 0; i < count; ++i)
4355  {
4356  m_SB.Add(INDENT);
4357  }
4358  }
4359 }
4360 
4361 #endif // #if VMA_STATS_STRING_ENABLED
4362 
4364 
4365 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
4366 {
4367  if(IsUserDataString())
4368  {
4369  VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4370 
4371  FreeUserDataString(hAllocator);
4372 
4373  if(pUserData != VMA_NULL)
4374  {
4375  const char* const newStrSrc = (char*)pUserData;
4376  const size_t newStrLen = strlen(newStrSrc);
4377  char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
4378  memcpy(newStrDst, newStrSrc, newStrLen + 1);
4379  m_pUserData = newStrDst;
4380  }
4381  }
4382  else
4383  {
4384  m_pUserData = pUserData;
4385  }
4386 }
4387 
4388 VkDeviceSize VmaAllocation_T::GetOffset() const
4389 {
4390  switch(m_Type)
4391  {
4392  case ALLOCATION_TYPE_BLOCK:
4393  return m_BlockAllocation.m_Offset;
4394  case ALLOCATION_TYPE_DEDICATED:
4395  return 0;
4396  default:
4397  VMA_ASSERT(0);
4398  return 0;
4399  }
4400 }
4401 
4402 VkDeviceMemory VmaAllocation_T::GetMemory() const
4403 {
4404  switch(m_Type)
4405  {
4406  case ALLOCATION_TYPE_BLOCK:
4407  return m_BlockAllocation.m_Block->m_hMemory;
4408  case ALLOCATION_TYPE_DEDICATED:
4409  return m_DedicatedAllocation.m_hMemory;
4410  default:
4411  VMA_ASSERT(0);
4412  return VK_NULL_HANDLE;
4413  }
4414 }
4415 
4416 uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
4417 {
4418  switch(m_Type)
4419  {
4420  case ALLOCATION_TYPE_BLOCK:
4421  return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4422  case ALLOCATION_TYPE_DEDICATED:
4423  return m_DedicatedAllocation.m_MemoryTypeIndex;
4424  default:
4425  VMA_ASSERT(0);
4426  return UINT32_MAX;
4427  }
4428 }
4429 
4430 void* VmaAllocation_T::GetMappedData() const
4431 {
4432  switch(m_Type)
4433  {
4434  case ALLOCATION_TYPE_BLOCK:
4435  if(m_MapCount != 0)
4436  {
4437  void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4438  VMA_ASSERT(pBlockData != VMA_NULL);
4439  return (char*)pBlockData + m_BlockAllocation.m_Offset;
4440  }
4441  else
4442  {
4443  return VMA_NULL;
4444  }
4445  break;
4446  case ALLOCATION_TYPE_DEDICATED:
4447  VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4448  return m_DedicatedAllocation.m_pMappedData;
4449  default:
4450  VMA_ASSERT(0);
4451  return VMA_NULL;
4452  }
4453 }
4454 
4455 bool VmaAllocation_T::CanBecomeLost() const
4456 {
4457  switch(m_Type)
4458  {
4459  case ALLOCATION_TYPE_BLOCK:
4460  return m_BlockAllocation.m_CanBecomeLost;
4461  case ALLOCATION_TYPE_DEDICATED:
4462  return false;
4463  default:
4464  VMA_ASSERT(0);
4465  return false;
4466  }
4467 }
4468 
4469 VmaPool VmaAllocation_T::GetPool() const
4470 {
4471  VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4472  return m_BlockAllocation.m_hPool;
4473 }
4474 
4475 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4476 {
4477  VMA_ASSERT(CanBecomeLost());
4478 
4479  /*
4480  Warning: This is a carefully designed algorithm.
4481  Do not modify unless you really know what you're doing :)
4482  */
4483  uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4484  for(;;)
4485  {
4486  if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4487  {
4488  VMA_ASSERT(0);
4489  return false;
4490  }
4491  else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4492  {
4493  return false;
4494  }
4495  else // Last use time earlier than current time.
4496  {
4497  if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4498  {
4499  // Setting hAllocation.LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
4500  // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
4501  return true;
4502  }
4503  }
4504  }
4505 }
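/*
Worked example (illustrative): with frameInUseCount = 2 and an allocation whose
LastUseFrameIndex is 10, MakeLost() fails for currentFrameIndex <= 12 (because
10 + 2 >= currentFrameIndex) and succeeds from frame 13 on, atomically setting
LastUseFrameIndex to VMA_FRAME_INDEX_LOST.
*/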
4506 
4507 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4508 {
4509  VMA_ASSERT(IsUserDataString());
4510  if(m_pUserData != VMA_NULL)
4511  {
4512  char* const oldStr = (char*)m_pUserData;
4513  const size_t oldStrLen = strlen(oldStr);
4514  vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4515  m_pUserData = VMA_NULL;
4516  }
4517 }
4518 
4519 void VmaAllocation_T::BlockAllocMap()
4520 {
4521  VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4522 
4523  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4524  {
4525  ++m_MapCount;
4526  }
4527  else
4528  {
4529  VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
4530  }
4531 }
4532 
4533 void VmaAllocation_T::BlockAllocUnmap()
4534 {
4535  VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4536 
4537  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4538  {
4539  --m_MapCount;
4540  }
4541  else
4542  {
4543  VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
4544  }
4545 }
4546 
4547 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
4548 {
4549  VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4550 
4551  if(m_MapCount != 0)
4552  {
4553  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4554  {
4555  VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4556  *ppData = m_DedicatedAllocation.m_pMappedData;
4557  ++m_MapCount;
4558  return VK_SUCCESS;
4559  }
4560  else
4561  {
4562  VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
4563  return VK_ERROR_MEMORY_MAP_FAILED;
4564  }
4565  }
4566  else
4567  {
4568  VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4569  hAllocator->m_hDevice,
4570  m_DedicatedAllocation.m_hMemory,
4571  0, // offset
4572  VK_WHOLE_SIZE,
4573  0, // flags
4574  ppData);
4575  if(result == VK_SUCCESS)
4576  {
4577  m_DedicatedAllocation.m_pMappedData = *ppData;
4578  m_MapCount = 1;
4579  }
4580  return result;
4581  }
4582 }
4583 
4584 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4585 {
4586  VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4587 
4588  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4589  {
4590  --m_MapCount;
4591  if(m_MapCount == 0)
4592  {
4593  m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4594  (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4595  hAllocator->m_hDevice,
4596  m_DedicatedAllocation.m_hMemory);
4597  }
4598  }
4599  else
4600  {
4601  VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
4602  }
4603 }
4604 
4605 #if VMA_STATS_STRING_ENABLED
4606 
4607 // Names correspond to values of enum VmaSuballocationType.
4608 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4609  "FREE",
4610  "UNKNOWN",
4611  "BUFFER",
4612  "IMAGE_UNKNOWN",
4613  "IMAGE_LINEAR",
4614  "IMAGE_OPTIMAL",
4615 };
4616 
4617 static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
4618 {
4619  json.BeginObject();
4620 
4621  json.WriteString("Blocks");
4622  json.WriteNumber(stat.blockCount);
4623 
4624  json.WriteString("Allocations");
4625  json.WriteNumber(stat.allocationCount);
4626 
4627  json.WriteString("UnusedRanges");
4628  json.WriteNumber(stat.unusedRangeCount);
4629 
4630  json.WriteString("UsedBytes");
4631  json.WriteNumber(stat.usedBytes);
4632 
4633  json.WriteString("UnusedBytes");
4634  json.WriteNumber(stat.unusedBytes);
4635 
4636  if(stat.allocationCount > 1)
4637  {
4638  json.WriteString("AllocationSize");
4639  json.BeginObject(true);
4640  json.WriteString("Min");
4641  json.WriteNumber(stat.allocationSizeMin);
4642  json.WriteString("Avg");
4643  json.WriteNumber(stat.allocationSizeAvg);
4644  json.WriteString("Max");
4645  json.WriteNumber(stat.allocationSizeMax);
4646  json.EndObject();
4647  }
4648 
4649  if(stat.unusedRangeCount > 1)
4650  {
4651  json.WriteString("UnusedRangeSize");
4652  json.BeginObject(true);
4653  json.WriteString("Min");
4654  json.WriteNumber(stat.unusedRangeSizeMin);
4655  json.WriteString("Avg");
4656  json.WriteNumber(stat.unusedRangeSizeAvg);
4657  json.WriteString("Max");
4658  json.WriteNumber(stat.unusedRangeSizeMax);
4659  json.EndObject();
4660  }
4661 
4662  json.EndObject();
4663 }
4664 
4665 #endif // #if VMA_STATS_STRING_ENABLED
4666 
4667 struct VmaSuballocationItemSizeLess
4668 {
4669  bool operator()(
4670  const VmaSuballocationList::iterator lhs,
4671  const VmaSuballocationList::iterator rhs) const
4672  {
4673  return lhs->size < rhs->size;
4674  }
4675  bool operator()(
4676  const VmaSuballocationList::iterator lhs,
4677  VkDeviceSize rhsSize) const
4678  {
4679  return lhs->size < rhsSize;
4680  }
4681 };
4682 
4684 // class VmaBlockMetadata
4685 
4686 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4687  m_Size(0),
4688  m_FreeCount(0),
4689  m_SumFreeSize(0),
4690  m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4691  m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4692 {
4693 }
4694 
4695 VmaBlockMetadata::~VmaBlockMetadata()
4696 {
4697 }
4698 
4699 void VmaBlockMetadata::Init(VkDeviceSize size)
4700 {
4701  m_Size = size;
4702  m_FreeCount = 1;
4703  m_SumFreeSize = size;
4704 
4705  VmaSuballocation suballoc = {};
4706  suballoc.offset = 0;
4707  suballoc.size = size;
4708  suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4709  suballoc.hAllocation = VK_NULL_HANDLE;
4710 
4711  m_Suballocations.push_back(suballoc);
4712  VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4713  --suballocItem;
4714  m_FreeSuballocationsBySize.push_back(suballocItem);
4715 }
4716 
4717 bool VmaBlockMetadata::Validate() const
4718 {
4719  if(m_Suballocations.empty())
4720  {
4721  return false;
4722  }
4723 
4724  // Expected offset of new suballocation as calculated from previous ones.
4725  VkDeviceSize calculatedOffset = 0;
4726  // Expected number of free suballocations as calculated from traversing their list.
4727  uint32_t calculatedFreeCount = 0;
4728  // Expected sum size of free suballocations as calculated from traversing their list.
4729  VkDeviceSize calculatedSumFreeSize = 0;
4730  // Expected number of free suballocations that should be registered in
4731  // m_FreeSuballocationsBySize calculated from traversing their list.
4732  size_t freeSuballocationsToRegister = 0;
4733  // True if previously visited suballocation was free.
4734  bool prevFree = false;
4735 
4736  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4737  suballocItem != m_Suballocations.cend();
4738  ++suballocItem)
4739  {
4740  const VmaSuballocation& subAlloc = *suballocItem;
4741 
4742  // Actual offset of this suballocation doesn't match expected one.
4743  if(subAlloc.offset != calculatedOffset)
4744  {
4745  return false;
4746  }
4747 
4748  const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4749  // Two adjacent free suballocations are invalid. They should be merged.
4750  if(prevFree && currFree)
4751  {
4752  return false;
4753  }
4754  prevFree = currFree;
4755 
4756  if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4757  {
4758  return false;
4759  }
4760 
4761  if(currFree)
4762  {
4763  calculatedSumFreeSize += subAlloc.size;
4764  ++calculatedFreeCount;
4765  if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4766  {
4767  ++freeSuballocationsToRegister;
4768  }
4769  }
4770 
4771  calculatedOffset += subAlloc.size;
4772  }
4773 
4774  // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
4775  // match expected one.
4776  if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4777  {
4778  return false;
4779  }
4780 
4781  VkDeviceSize lastSize = 0;
4782  for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4783  {
4784  VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4785 
4786  // Only free suballocations can be registered in m_FreeSuballocationsBySize.
4787  if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4788  {
4789  return false;
4790  }
4791  // They must be sorted by size ascending.
4792  if(suballocItem->size < lastSize)
4793  {
4794  return false;
4795  }
4796 
4797  lastSize = suballocItem->size;
4798  }
4799 
4800  // Check if totals match calculated values.
4801  return
4802  ValidateFreeSuballocationList() &&
4803  (calculatedOffset == m_Size) &&
4804  (calculatedSumFreeSize == m_SumFreeSize) &&
4805  (calculatedFreeCount == m_FreeCount);
4806 }
4807 
4808 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax() const
4809 {
4810  if(!m_FreeSuballocationsBySize.empty())
4811  {
4812  return m_FreeSuballocationsBySize.back()->size;
4813  }
4814  else
4815  {
4816  return 0;
4817  }
4818 }
4819 
4820 bool VmaBlockMetadata::IsEmpty() const
4821 {
4822  return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
4823 }
4824 
4825 void VmaBlockMetadata::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
4826 {
4827  outInfo.blockCount = 1;
4828 
4829  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4830  outInfo.allocationCount = rangeCount - m_FreeCount;
4831  outInfo.unusedRangeCount = m_FreeCount;
4832 
4833  outInfo.unusedBytes = m_SumFreeSize;
4834  outInfo.usedBytes = m_Size - outInfo.unusedBytes;
4835 
4836  outInfo.allocationSizeMin = UINT64_MAX;
4837  outInfo.allocationSizeMax = 0;
4838  outInfo.unusedRangeSizeMin = UINT64_MAX;
4839  outInfo.unusedRangeSizeMax = 0;
4840 
4841  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4842  suballocItem != m_Suballocations.cend();
4843  ++suballocItem)
4844  {
4845  const VmaSuballocation& suballoc = *suballocItem;
4846  if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
4847  {
4848  outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
4849  outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
4850  }
4851  else
4852  {
4853  outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
4854  outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
4855  }
4856  }
4857 }
4858 
4859 void VmaBlockMetadata::AddPoolStats(VmaPoolStats& inoutStats) const
4860 {
4861  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4862 
4863  inoutStats.size += m_Size;
4864  inoutStats.unusedSize += m_SumFreeSize;
4865  inoutStats.allocationCount += rangeCount - m_FreeCount;
4866  inoutStats.unusedRangeCount += m_FreeCount;
4867  inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
4868 }
4869 
4870 #if VMA_STATS_STRING_ENABLED
4871 
4872 void VmaBlockMetadata::PrintDetailedMap(class VmaJsonWriter& json) const
4873 {
4874  json.BeginObject();
4875 
4876  json.WriteString("TotalBytes");
4877  json.WriteNumber(m_Size);
4878 
4879  json.WriteString("UnusedBytes");
4880  json.WriteNumber(m_SumFreeSize);
4881 
4882  json.WriteString("Allocations");
4883  json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4884 
4885  json.WriteString("UnusedRanges");
4886  json.WriteNumber(m_FreeCount);
4887 
4888  json.WriteString("Suballocations");
4889  json.BeginArray();
4890  size_t i = 0;
4891  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4892  suballocItem != m_Suballocations.cend();
4893  ++suballocItem, ++i)
4894  {
4895  json.BeginObject(true);
4896 
4897  json.WriteString("Type");
4898  json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4899 
4900  json.WriteString("Size");
4901  json.WriteNumber(suballocItem->size);
4902 
4903  json.WriteString("Offset");
4904  json.WriteNumber(suballocItem->offset);
4905 
4906  if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4907  {
4908  const void* pUserData = suballocItem->hAllocation->GetUserData();
4909  if(pUserData != VMA_NULL)
4910  {
4911  json.WriteString("UserData");
4912  if(suballocItem->hAllocation->IsUserDataString())
4913  {
4914  json.WriteString((const char*)pUserData);
4915  }
4916  else
4917  {
4918  json.BeginString();
4919  json.ContinueString_Pointer(pUserData);
4920  json.EndString();
4921  }
4922  }
4923  }
4924 
4925  json.EndObject();
4926  }
4927  json.EndArray();
4928 
4929  json.EndObject();
4930 }
4931 
4932 #endif // #if VMA_STATS_STRING_ENABLED
4933 
4934 /*
4935 How many suitable free suballocations to analyze before choosing the best one.
4936 - Set to 1 to use First-Fit algorithm - the first suitable free suballocation
4937  will be chosen.
4938 - Set to UINT32_MAX to use Best-Fit/Worst-Fit algorithm - all suitable free
4939  suballocations will be analyzed and the best one will be chosen.
4940 - Any other value is also acceptable.
4941 */
4942 //static const uint32_t MAX_SUITABLE_SUBALLOCATIONS_TO_CHECK = 8;
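/*
Worked example (illustrative): with registered free sizes [64, 128, 256, 1024]
and allocSize = 200, the VMA_BEST_FIT path below binary-searches to the first
size not less than 200 (here 256) and scans upward, while the alternative path
scans downward starting from 1024, preferring the biggest free range.
*/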
4943 
4944 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
4945 {
4946  VMA_ASSERT(IsEmpty());
4947  pAllocationRequest->offset = 0;
4948  pAllocationRequest->sumFreeSize = m_SumFreeSize;
4949  pAllocationRequest->sumItemSize = 0;
4950  pAllocationRequest->item = m_Suballocations.begin();
4951  pAllocationRequest->itemsToMakeLostCount = 0;
4952 }
4953 
4954 bool VmaBlockMetadata::CreateAllocationRequest(
4955  uint32_t currentFrameIndex,
4956  uint32_t frameInUseCount,
4957  VkDeviceSize bufferImageGranularity,
4958  VkDeviceSize allocSize,
4959  VkDeviceSize allocAlignment,
4960  VmaSuballocationType allocType,
4961  bool canMakeOtherLost,
4962  VmaAllocationRequest* pAllocationRequest)
4963 {
4964  VMA_ASSERT(allocSize > 0);
4965  VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4966  VMA_ASSERT(pAllocationRequest != VMA_NULL);
4967  VMA_HEAVY_ASSERT(Validate());
4968 
4969  // There is not enough total free space in this block to fulfill the request: Early return.
4970  if(canMakeOtherLost == false && m_SumFreeSize < allocSize)
4971  {
4972  return false;
4973  }
4974 
4975  // New algorithm, efficiently searching m_FreeSuballocationsBySize.
4976  const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4977  if(freeSuballocCount > 0)
4978  {
4979  if(VMA_BEST_FIT)
4980  {
4981  // Find first free suballocation with size not less than allocSize.
4982  VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
4983  m_FreeSuballocationsBySize.data(),
4984  m_FreeSuballocationsBySize.data() + freeSuballocCount,
4985  allocSize,
4986  VmaSuballocationItemSizeLess());
4987  size_t index = it - m_FreeSuballocationsBySize.data();
4988  for(; index < freeSuballocCount; ++index)
4989  {
4990  if(CheckAllocation(
4991  currentFrameIndex,
4992  frameInUseCount,
4993  bufferImageGranularity,
4994  allocSize,
4995  allocAlignment,
4996  allocType,
4997  m_FreeSuballocationsBySize[index],
4998  false, // canMakeOtherLost
4999  &pAllocationRequest->offset,
5000  &pAllocationRequest->itemsToMakeLostCount,
5001  &pAllocationRequest->sumFreeSize,
5002  &pAllocationRequest->sumItemSize))
5003  {
5004  pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5005  return true;
5006  }
5007  }
5008  }
5009  else
5010  {
5011  // Search starting from the biggest suballocations.
5012  for(size_t index = freeSuballocCount; index--; )
5013  {
5014  if(CheckAllocation(
5015  currentFrameIndex,
5016  frameInUseCount,
5017  bufferImageGranularity,
5018  allocSize,
5019  allocAlignment,
5020  allocType,
5021  m_FreeSuballocationsBySize[index],
5022  false, // canMakeOtherLost
5023  &pAllocationRequest->offset,
5024  &pAllocationRequest->itemsToMakeLostCount,
5025  &pAllocationRequest->sumFreeSize,
5026  &pAllocationRequest->sumItemSize))
5027  {
5028  pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5029  return true;
5030  }
5031  }
5032  }
5033  }
5034 
5035  if(canMakeOtherLost)
5036  {
5037  // Brute-force algorithm. TODO: Come up with something better.
5038 
5039  pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5040  pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5041 
5042  VmaAllocationRequest tmpAllocRequest = {};
5043  for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5044  suballocIt != m_Suballocations.end();
5045  ++suballocIt)
5046  {
5047  if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5048  suballocIt->hAllocation->CanBecomeLost())
5049  {
5050  if(CheckAllocation(
5051  currentFrameIndex,
5052  frameInUseCount,
5053  bufferImageGranularity,
5054  allocSize,
5055  allocAlignment,
5056  allocType,
5057  suballocIt,
5058  canMakeOtherLost,
5059  &tmpAllocRequest.offset,
5060  &tmpAllocRequest.itemsToMakeLostCount,
5061  &tmpAllocRequest.sumFreeSize,
5062  &tmpAllocRequest.sumItemSize))
5063  {
5064  tmpAllocRequest.item = suballocIt;
5065 
5066  if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5067  {
5068  *pAllocationRequest = tmpAllocRequest;
5069  }
5070  }
5071  }
5072  }
5073 
5074  if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
5075  {
5076  return true;
5077  }
5078  }
5079 
5080  return false;
5081 }
5082 
5083 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5084  uint32_t currentFrameIndex,
5085  uint32_t frameInUseCount,
5086  VmaAllocationRequest* pAllocationRequest)
5087 {
5088  while(pAllocationRequest->itemsToMakeLostCount > 0)
5089  {
5090  if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5091  {
5092  ++pAllocationRequest->item;
5093  }
5094  VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5095  VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5096  VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5097  if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5098  {
5099  pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5100  --pAllocationRequest->itemsToMakeLostCount;
5101  }
5102  else
5103  {
5104  return false;
5105  }
5106  }
5107 
5108  VMA_HEAVY_ASSERT(Validate());
5109  VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5110  VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
5111 
5112  return true;
5113 }
5114 
5115 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5116 {
5117  uint32_t lostAllocationCount = 0;
5118  for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5119  it != m_Suballocations.end();
5120  ++it)
5121  {
5122  if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5123  it->hAllocation->CanBecomeLost() &&
5124  it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5125  {
5126  it = FreeSuballocation(it);
5127  ++lostAllocationCount;
5128  }
5129  }
5130  return lostAllocationCount;
5131 }
5132 
5133 void VmaBlockMetadata::Alloc(
5134  const VmaAllocationRequest& request,
5135  VmaSuballocationType type,
5136  VkDeviceSize allocSize,
5137  VmaAllocation hAllocation)
5138 {
5139  VMA_ASSERT(request.item != m_Suballocations.end());
5140  VmaSuballocation& suballoc = *request.item;
5141  // Given suballocation is a free block.
5142  VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5143  // Given offset is inside this suballocation.
5144  VMA_ASSERT(request.offset >= suballoc.offset);
5145  const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5146  VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5147  const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
5148 
5149  // Unregister this free suballocation from m_FreeSuballocationsBySize and update
5150  // it to become used.
5151  UnregisterFreeSuballocation(request.item);
5152 
5153  suballoc.offset = request.offset;
5154  suballoc.size = allocSize;
5155  suballoc.type = type;
5156  suballoc.hAllocation = hAllocation;
5157 
5158  // If there are any free bytes remaining at the end, insert new free suballocation after current one.
5159  if(paddingEnd)
5160  {
5161  VmaSuballocation paddingSuballoc = {};
5162  paddingSuballoc.offset = request.offset + allocSize;
5163  paddingSuballoc.size = paddingEnd;
5164  paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5165  VmaSuballocationList::iterator next = request.item;
5166  ++next;
5167  const VmaSuballocationList::iterator paddingEndItem =
5168  m_Suballocations.insert(next, paddingSuballoc);
5169  RegisterFreeSuballocation(paddingEndItem);
5170  }
5171 
5172  // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
5173  if(paddingBegin)
5174  {
5175  VmaSuballocation paddingSuballoc = {};
5176  paddingSuballoc.offset = request.offset - paddingBegin;
5177  paddingSuballoc.size = paddingBegin;
5178  paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5179  const VmaSuballocationList::iterator paddingBeginItem =
5180  m_Suballocations.insert(request.item, paddingSuballoc);
5181  RegisterFreeSuballocation(paddingBeginItem);
5182  }
5183 
5184  // Update totals.
5185  m_FreeCount = m_FreeCount - 1;
5186  if(paddingBegin > 0)
5187  {
5188  ++m_FreeCount;
5189  }
5190  if(paddingEnd > 0)
5191  {
5192  ++m_FreeCount;
5193  }
5194  m_SumFreeSize -= allocSize;
5195 }
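/*
Worked example (illustrative): splitting a single 1024-byte FREE suballocation
at offset 0 with request.offset = 256 and allocSize = 512 yields paddingBegin =
256 and paddingEnd = 256, so the list becomes
[FREE 0..256) [USED 256..768) [FREE 768..1024)
and m_FreeCount ends at 2 (1 - 1 + 1 + 1).
*/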
5196 
5197 void VmaBlockMetadata::Free(const VmaAllocation allocation)
5198 {
5199  for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5200  suballocItem != m_Suballocations.end();
5201  ++suballocItem)
5202  {
5203  VmaSuballocation& suballoc = *suballocItem;
5204  if(suballoc.hAllocation == allocation)
5205  {
5206  FreeSuballocation(suballocItem);
5207  VMA_HEAVY_ASSERT(Validate());
5208  return;
5209  }
5210  }
5211  VMA_ASSERT(0 && "Not found!");
5212 }
5213 
5214 bool VmaBlockMetadata::ValidateFreeSuballocationList() const
5215 {
5216  VkDeviceSize lastSize = 0;
5217  for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5218  {
5219  const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5220 
5221  if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5222  {
5223  VMA_ASSERT(0);
5224  return false;
5225  }
5226  if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5227  {
5228  VMA_ASSERT(0);
5229  return false;
5230  }
5231  if(it->size < lastSize)
5232  {
5233  VMA_ASSERT(0);
5234  return false;
5235  }
5236 
5237  lastSize = it->size;
5238  }
5239  return true;
5240 }
5241 
5242 bool VmaBlockMetadata::CheckAllocation(
5243  uint32_t currentFrameIndex,
5244  uint32_t frameInUseCount,
5245  VkDeviceSize bufferImageGranularity,
5246  VkDeviceSize allocSize,
5247  VkDeviceSize allocAlignment,
5248  VmaSuballocationType allocType,
5249  VmaSuballocationList::const_iterator suballocItem,
5250  bool canMakeOtherLost,
5251  VkDeviceSize* pOffset,
5252  size_t* itemsToMakeLostCount,
5253  VkDeviceSize* pSumFreeSize,
5254  VkDeviceSize* pSumItemSize) const
5255 {
5256  VMA_ASSERT(allocSize > 0);
5257  VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5258  VMA_ASSERT(suballocItem != m_Suballocations.cend());
5259  VMA_ASSERT(pOffset != VMA_NULL);
5260 
5261  *itemsToMakeLostCount = 0;
5262  *pSumFreeSize = 0;
5263  *pSumItemSize = 0;
5264 
5265  if(canMakeOtherLost)
5266  {
5267  if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5268  {
5269  *pSumFreeSize = suballocItem->size;
5270  }
5271  else
5272  {
5273  if(suballocItem->hAllocation->CanBecomeLost() &&
5274  suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5275  {
5276  ++*itemsToMakeLostCount;
5277  *pSumItemSize = suballocItem->size;
5278  }
5279  else
5280  {
5281  return false;
5282  }
5283  }
5284 
5285  // Remaining size is too small for this request: Early return.
5286  if(m_Size - suballocItem->offset < allocSize)
5287  {
5288  return false;
5289  }
5290 
5291  // Start from offset equal to beginning of this suballocation.
5292  *pOffset = suballocItem->offset;
5293 
5294  // Apply VMA_DEBUG_MARGIN at the beginning.
5295  if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5296  {
5297  *pOffset += VMA_DEBUG_MARGIN;
5298  }
5299 
5300  // Apply alignment.
5301  const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5302  *pOffset = VmaAlignUp(*pOffset, alignment);
5303 
5304  // Check previous suballocations for BufferImageGranularity conflicts.
5305  // Make bigger alignment if necessary.
5306  if(bufferImageGranularity > 1)
5307  {
5308  bool bufferImageGranularityConflict = false;
5309  VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5310  while(prevSuballocItem != m_Suballocations.cbegin())
5311  {
5312  --prevSuballocItem;
5313  const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5314  if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5315  {
5316  if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5317  {
5318  bufferImageGranularityConflict = true;
5319  break;
5320  }
5321  }
5322  else
5323  // Already on previous page.
5324  break;
5325  }
5326  if(bufferImageGranularityConflict)
5327  {
5328  *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5329  }
5330  }
5331 
5332  // Now that we have final *pOffset, check if we are past suballocItem.
5333  // If yes, return false - this function should be called with another suballocItem as the starting point.
5334  if(*pOffset >= suballocItem->offset + suballocItem->size)
5335  {
5336  return false;
5337  }
5338 
5339  // Calculate padding at the beginning based on current offset.
5340  const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5341 
5342  // Calculate required margin at the end if this is not last suballocation.
5343  VmaSuballocationList::const_iterator next = suballocItem;
5344  ++next;
5345  const VkDeviceSize requiredEndMargin =
5346  (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5347 
5348  const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5349  // Another early return check.
5350  if(suballocItem->offset + totalSize > m_Size)
5351  {
5352  return false;
5353  }
5354 
5355  // Advance lastSuballocItem until desired size is reached.
5356  // Update itemsToMakeLostCount.
5357  VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5358  if(totalSize > suballocItem->size)
5359  {
5360  VkDeviceSize remainingSize = totalSize - suballocItem->size;
5361  while(remainingSize > 0)
5362  {
5363  ++lastSuballocItem;
5364  if(lastSuballocItem == m_Suballocations.cend())
5365  {
5366  return false;
5367  }
5368  if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5369  {
5370  *pSumFreeSize += lastSuballocItem->size;
5371  }
5372  else
5373  {
5374  VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5375  if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5376  lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5377  {
5378  ++*itemsToMakeLostCount;
5379  *pSumItemSize += lastSuballocItem->size;
5380  }
5381  else
5382  {
5383  return false;
5384  }
5385  }
5386  remainingSize = (lastSuballocItem->size < remainingSize) ?
5387  remainingSize - lastSuballocItem->size : 0;
5388  }
5389  }
5390 
5391  // Check next suballocations for BufferImageGranularity conflicts.
5392  // If conflict exists, we must mark more allocations lost or fail.
5393  if(bufferImageGranularity > 1)
5394  {
5395  VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5396  ++nextSuballocItem;
5397  while(nextSuballocItem != m_Suballocations.cend())
5398  {
5399  const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5400  if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5401  {
5402  if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5403  {
5404  VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5405  if(nextSuballoc.hAllocation->CanBecomeLost() &&
5406  nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5407  {
5408  ++*itemsToMakeLostCount;
5409  }
5410  else
5411  {
5412  return false;
5413  }
5414  }
5415  }
5416  else
5417  {
5418  // Already on next page.
5419  break;
5420  }
5421  ++nextSuballocItem;
5422  }
5423  }
5424  }
5425  else
5426  {
5427  const VmaSuballocation& suballoc = *suballocItem;
5428  VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5429 
5430  *pSumFreeSize = suballoc.size;
5431 
5432  // Size of this suballocation is too small for this request: Early return.
5433  if(suballoc.size < allocSize)
5434  {
5435  return false;
5436  }
5437 
5438  // Start from offset equal to beginning of this suballocation.
5439  *pOffset = suballoc.offset;
5440 
5441  // Apply VMA_DEBUG_MARGIN at the beginning.
5442  if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5443  {
5444  *pOffset += VMA_DEBUG_MARGIN;
5445  }
5446 
5447  // Apply alignment.
5448  const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5449  *pOffset = VmaAlignUp(*pOffset, alignment);
5450 
5451  // Check previous suballocations for BufferImageGranularity conflicts.
5452  // Make bigger alignment if necessary.
5453  if(bufferImageGranularity > 1)
5454  {
5455  bool bufferImageGranularityConflict = false;
5456  VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5457  while(prevSuballocItem != m_Suballocations.cbegin())
5458  {
5459  --prevSuballocItem;
5460  const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5461  if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5462  {
5463  if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5464  {
5465  bufferImageGranularityConflict = true;
5466  break;
5467  }
5468  }
5469  else
5470  // Already on previous page.
5471  break;
5472  }
5473  if(bufferImageGranularityConflict)
5474  {
5475  *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5476  }
5477  }
5478 
5479  // Calculate padding at the beginning based on current offset.
5480  const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5481 
5482  // Calculate required margin at the end if this is not last suballocation.
5483  VmaSuballocationList::const_iterator next = suballocItem;
5484  ++next;
5485  const VkDeviceSize requiredEndMargin =
5486  (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5487 
5488  // Fail if requested size plus margin before and after is bigger than size of this suballocation.
5489  if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
5490  {
5491  return false;
5492  }
5493 
5494  // Check next suballocations for BufferImageGranularity conflicts.
5495  // If conflict exists, allocation cannot be made here.
5496  if(bufferImageGranularity > 1)
5497  {
5498  VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5499  ++nextSuballocItem;
5500  while(nextSuballocItem != m_Suballocations.cend())
5501  {
5502  const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5503  if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5504  {
5505  if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5506  {
5507  return false;
5508  }
5509  }
5510  else
5511  {
5512  // Already on next page.
5513  break;
5514  }
5515  ++nextSuballocItem;
5516  }
5517  }
5518  }
5519 
5520  // All tests passed: Success. pOffset is already filled.
5521  return true;
5522 }
5523 
5524 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5525 {
5526  VMA_ASSERT(item != m_Suballocations.end());
5527  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5528 
5529  VmaSuballocationList::iterator nextItem = item;
5530  ++nextItem;
5531  VMA_ASSERT(nextItem != m_Suballocations.end());
5532  VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5533 
5534  item->size += nextItem->size;
5535  --m_FreeCount;
5536  m_Suballocations.erase(nextItem);
5537 }
5538 
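// Turns the given suballocation back into a free one and coalesces it with a free
// predecessor and/or successor, keeping m_FreeSuballocationsBySize consistent.
// Returns an iterator to the resulting (possibly merged) free suballocation.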
5539 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5540 {
5541  // Change this suballocation to be marked as free.
5542  VmaSuballocation& suballoc = *suballocItem;
5543  suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5544  suballoc.hAllocation = VK_NULL_HANDLE;
5545 
5546  // Update totals.
5547  ++m_FreeCount;
5548  m_SumFreeSize += suballoc.size;
5549 
5550  // Merge with previous and/or next suballocation if it's also free.
5551  bool mergeWithNext = false;
5552  bool mergeWithPrev = false;
5553 
5554  VmaSuballocationList::iterator nextItem = suballocItem;
5555  ++nextItem;
5556  if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5557  {
5558  mergeWithNext = true;
5559  }
5560 
5561  VmaSuballocationList::iterator prevItem = suballocItem;
5562  if(suballocItem != m_Suballocations.begin())
5563  {
5564  --prevItem;
5565  if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5566  {
5567  mergeWithPrev = true;
5568  }
5569  }
5570 
5571  if(mergeWithNext)
5572  {
5573  UnregisterFreeSuballocation(nextItem);
5574  MergeFreeWithNext(suballocItem);
5575  }
5576 
5577  if(mergeWithPrev)
5578  {
5579  UnregisterFreeSuballocation(prevItem);
5580  MergeFreeWithNext(prevItem);
5581  RegisterFreeSuballocation(prevItem);
5582  return prevItem;
5583  }
5584  else
5585  {
5586  RegisterFreeSuballocation(suballocItem);
5587  return suballocItem;
5588  }
5589 }
5590 
5591 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5592 {
5593  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5594  VMA_ASSERT(item->size > 0);
5595 
5596  // You may want to enable this validation at the beginning or at the end of
5597  // this function, depending on what you want to check.
5598  VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5599 
5600  if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5601  {
5602  if(m_FreeSuballocationsBySize.empty())
5603  {
5604  m_FreeSuballocationsBySize.push_back(item);
5605  }
5606  else
5607  {
5608  VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
5609  }
5610  }
5611 
5612  //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5613 }
5614 
5615 
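// Removes the given item from m_FreeSuballocationsBySize. Because that vector is
// sorted by size only, VmaBinaryFindFirstNotLess locates the first entry of equal
// size, and the loop below scans forward through the run of equal-sized entries
// until it finds the exact iterator to remove.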
5616 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5617 {
5618  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5619  VMA_ASSERT(item->size > 0);
5620 
5621  // You may want to enable this validation at the beginning or at the end of
5622  // this function, depending on what you want to check.
5623  VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5624 
5625  if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5626  {
5627  VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
5628  m_FreeSuballocationsBySize.data(),
5629  m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5630  item,
5631  VmaSuballocationItemSizeLess());
5632  for(size_t index = it - m_FreeSuballocationsBySize.data();
5633  index < m_FreeSuballocationsBySize.size();
5634  ++index)
5635  {
5636  if(m_FreeSuballocationsBySize[index] == item)
5637  {
5638  VmaVectorRemove(m_FreeSuballocationsBySize, index);
5639  return;
5640  }
5641  VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
5642  }
5643  VMA_ASSERT(0 && "Not found.");
5644  }
5645 
5646  //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5647 }
5648 
5649 ////////////////////////////////////////////////////////////////////////////////
5650 // class VmaDeviceMemoryMapping
5651 
5652 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5653  m_MapCount(0),
5654  m_pMappedData(VMA_NULL)
5655 {
5656 }
5657 
5658 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5659 {
5660  VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
5661 }
5662 
5663 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void **ppData)
5664 {
5665  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5666  if(m_MapCount != 0)
5667  {
5668  ++m_MapCount;
5669  VMA_ASSERT(m_pMappedData != VMA_NULL);
5670  if(ppData != VMA_NULL)
5671  {
5672  *ppData = m_pMappedData;
5673  }
5674  return VK_SUCCESS;
5675  }
5676  else
5677  {
5678  VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5679  hAllocator->m_hDevice,
5680  hMemory,
5681  0, // offset
5682  VK_WHOLE_SIZE,
5683  0, // flags
5684  &m_pMappedData);
5685  if(result == VK_SUCCESS)
5686  {
5687  if(ppData != VMA_NULL)
5688  {
5689  *ppData = m_pMappedData;
5690  }
5691  m_MapCount = 1;
5692  }
5693  return result;
5694  }
5695 }
5696 
5697 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory)
5698 {
5699  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5700  if(m_MapCount != 0)
5701  {
5702  if(--m_MapCount == 0)
5703  {
5704  m_pMappedData = VMA_NULL;
5705  (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
5706  }
5707  }
5708  else
5709  {
5710  VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
5711  }
5712 }
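// Illustrative usage sketch of the reference-counted mapping above (hypothetical
// handles, not part of the public API). Repeated Map() calls return the same
// pointer; only the final Unmap() actually calls vkUnmapMemory:
//
//   void* pData = VMA_NULL;
//   if(mapping.Map(hAllocator, hMemory, &pData) == VK_SUCCESS)
//   {
//       memcpy(pData, srcData, (size_t)srcSize); // write through the mapped pointer
//       mapping.Unmap(hAllocator, hMemory); // m_MapCount drops to 0 -> vkUnmapMemory
//   }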
5713 
5714 ////////////////////////////////////////////////////////////////////////////////
5715 // class VmaDeviceMemoryBlock
5716 
5717 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5718  m_MemoryTypeIndex(UINT32_MAX),
5719  m_hMemory(VK_NULL_HANDLE),
5720  m_Metadata(hAllocator)
5721 {
5722 }
5723 
5724 void VmaDeviceMemoryBlock::Init(
5725  uint32_t newMemoryTypeIndex,
5726  VkDeviceMemory newMemory,
5727  VkDeviceSize newSize)
5728 {
5729  VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5730 
5731  m_MemoryTypeIndex = newMemoryTypeIndex;
5732  m_hMemory = newMemory;
5733 
5734  m_Metadata.Init(newSize);
5735 }
5736 
5737 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5738 {
5739  // This is the most important assert in the entire library.
5740  // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
5741  VMA_ASSERT(m_Metadata.IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
5742 
5743  VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5744  allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5745  m_hMemory = VK_NULL_HANDLE;
5746 }
5747 
5748 bool VmaDeviceMemoryBlock::Validate() const
5749 {
5750  if((m_hMemory == VK_NULL_HANDLE) ||
5751  (m_Metadata.GetSize() == 0))
5752  {
5753  return false;
5754  }
5755 
5756  return m_Metadata.Validate();
5757 }
5758 
5759 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, void** ppData)
5760 {
5761  return m_Mapping.Map(hAllocator, m_hMemory, ppData);
5762 }
5763 
5764 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator)
5765 {
5766  m_Mapping.Unmap(hAllocator, m_hMemory);
5767 }
5768 
5769 static void InitStatInfo(VmaStatInfo& outInfo)
5770 {
5771  memset(&outInfo, 0, sizeof(outInfo));
5772  outInfo.allocationSizeMin = UINT64_MAX;
5773  outInfo.unusedRangeSizeMin = UINT64_MAX;
5774 }
5775 
5776 // Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
5777 static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
5778 {
5779  inoutInfo.blockCount += srcInfo.blockCount;
5780  inoutInfo.allocationCount += srcInfo.allocationCount;
5781  inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
5782  inoutInfo.usedBytes += srcInfo.usedBytes;
5783  inoutInfo.unusedBytes += srcInfo.unusedBytes;
5784  inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
5785  inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
5786  inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
5787  inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
5788 }
5789 
5790 static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
5791 {
5792  inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
5793  VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
5794  inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
5795  VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
5796 }
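// Note that VmaRoundDiv performs rounded (not truncating) integer division, so
// e.g. usedBytes = 1000 with allocationCount = 6 yields allocationSizeAvg = 167
// rather than 166.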
5797 
5798 VmaPool_T::VmaPool_T(
5799  VmaAllocator hAllocator,
5800  const VmaPoolCreateInfo& createInfo) :
5801  m_BlockVector(
5802  hAllocator,
5803  createInfo.memoryTypeIndex,
5804  createInfo.blockSize,
5805  createInfo.minBlockCount,
5806  createInfo.maxBlockCount,
5807  (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
5808  createInfo.frameInUseCount,
5809  true) // isCustomPool
5810 {
5811 }
5812 
5813 VmaPool_T::~VmaPool_T()
5814 {
5815 }
5816 
5817 #if VMA_STATS_STRING_ENABLED
5818 
5819 #endif // #if VMA_STATS_STRING_ENABLED
5820 
5821 VmaBlockVector::VmaBlockVector(
5822  VmaAllocator hAllocator,
5823  uint32_t memoryTypeIndex,
5824  VkDeviceSize preferredBlockSize,
5825  size_t minBlockCount,
5826  size_t maxBlockCount,
5827  VkDeviceSize bufferImageGranularity,
5828  uint32_t frameInUseCount,
5829  bool isCustomPool) :
5830  m_hAllocator(hAllocator),
5831  m_MemoryTypeIndex(memoryTypeIndex),
5832  m_PreferredBlockSize(preferredBlockSize),
5833  m_MinBlockCount(minBlockCount),
5834  m_MaxBlockCount(maxBlockCount),
5835  m_BufferImageGranularity(bufferImageGranularity),
5836  m_FrameInUseCount(frameInUseCount),
5837  m_IsCustomPool(isCustomPool),
5838  m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5839  m_HasEmptyBlock(false),
5840  m_pDefragmentator(VMA_NULL)
5841 {
5842 }
5843 
5844 VmaBlockVector::~VmaBlockVector()
5845 {
5846  VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5847 
5848  for(size_t i = m_Blocks.size(); i--; )
5849  {
5850  m_Blocks[i]->Destroy(m_hAllocator);
5851  vma_delete(m_hAllocator, m_Blocks[i]);
5852  }
5853 }
5854 
5855 VkResult VmaBlockVector::CreateMinBlocks()
5856 {
5857  for(size_t i = 0; i < m_MinBlockCount; ++i)
5858  {
5859  VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5860  if(res != VK_SUCCESS)
5861  {
5862  return res;
5863  }
5864  }
5865  return VK_SUCCESS;
5866 }
5867 
5868 void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
5869 {
5870  pStats->size = 0;
5871  pStats->unusedSize = 0;
5872  pStats->allocationCount = 0;
5873  pStats->unusedRangeCount = 0;
5874  pStats->unusedRangeSizeMax = 0;
5875 
5876  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5877 
5878  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5879  {
5880  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
5881  VMA_ASSERT(pBlock);
5882  VMA_HEAVY_ASSERT(pBlock->Validate());
5883  pBlock->m_Metadata.AddPoolStats(*pStats);
5884  }
5885 }
5886 
5887 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
5888 
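// Allocation strategy implemented below, in order:
// 1. Try existing blocks without disturbing other allocations (forward order,
//    preferring blocks with the least free space).
// 2. Create a new block; default (non-custom) pools retry with 1/2 and then 1/4
//    of the preferred block size if the full-size allocation fails.
// 3. If VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT is set, retry existing
//    blocks while making eligible allocations lost, up to VMA_ALLOCATION_TRY_COUNT
//    attempts, picking the candidate request with the lowest cost.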
5889 VkResult VmaBlockVector::Allocate(
5890  VmaPool hCurrentPool,
5891  uint32_t currentFrameIndex,
5892  const VkMemoryRequirements& vkMemReq,
5893  const VmaAllocationCreateInfo& createInfo,
5894  VmaSuballocationType suballocType,
5895  VmaAllocation* pAllocation)
5896 {
5897  const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
5898  const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
5899 
5900  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5901 
5902  // 1. Search existing allocations. Try to allocate without making other allocations lost.
5903  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
5904  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5905  {
5906  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
5907  VMA_ASSERT(pCurrBlock);
5908  VmaAllocationRequest currRequest = {};
5909  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5910  currentFrameIndex,
5911  m_FrameInUseCount,
5912  m_BufferImageGranularity,
5913  vkMemReq.size,
5914  vkMemReq.alignment,
5915  suballocType,
5916  false, // canMakeOtherLost
5917  &currRequest))
5918  {
5919  // Allocate from pCurrBlock.
5920  VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
5921 
5922  if(mapped)
5923  {
5924  VkResult res = pCurrBlock->Map(m_hAllocator, nullptr);
5925  if(res != VK_SUCCESS)
5926  {
5927  return res;
5928  }
5929  }
5930 
5931  // We no longer have an empty block.
5932  if(pCurrBlock->m_Metadata.IsEmpty())
5933  {
5934  m_HasEmptyBlock = false;
5935  }
5936 
5937  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
5938  pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5939  (*pAllocation)->InitBlockAllocation(
5940  hCurrentPool,
5941  pCurrBlock,
5942  currRequest.offset,
5943  vkMemReq.alignment,
5944  vkMemReq.size,
5945  suballocType,
5946  mapped,
5947  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
5948  VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5949  VMA_DEBUG_LOG(" Returned from existing block #%u", (uint32_t)blockIndex);
5950  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
5951  return VK_SUCCESS;
5952  }
5953  }
5954 
5955  const bool canCreateNewBlock =
5956  ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
5957  (m_Blocks.size() < m_MaxBlockCount);
5958 
5959  // 2. Try to create new block.
5960  if(canCreateNewBlock)
5961  {
5962  // 2.1. Start with full preferredBlockSize.
5963  VkDeviceSize blockSize = m_PreferredBlockSize;
5964  size_t newBlockIndex = 0;
5965  VkResult res = CreateBlock(blockSize, &newBlockIndex);
5966  // Allocating blocks of other sizes is allowed only in default pools.
5967  // In custom pools block size is fixed.
5968  if(res < 0 && m_IsCustomPool == false)
5969  {
5970  // 2.2. Try half the size.
5971  blockSize /= 2;
5972  if(blockSize >= vkMemReq.size)
5973  {
5974  res = CreateBlock(blockSize, &newBlockIndex);
5975  if(res < 0)
5976  {
5977  // 2.3. Try quarter the size.
5978  blockSize /= 2;
5979  if(blockSize >= vkMemReq.size)
5980  {
5981  res = CreateBlock(blockSize, &newBlockIndex);
5982  }
5983  }
5984  }
5985  }
5986  if(res == VK_SUCCESS)
5987  {
5988  VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
5989  VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
5990 
5991  if(mapped)
5992  {
5993  res = pBlock->Map(m_hAllocator, nullptr);
5994  if(res != VK_SUCCESS)
5995  {
5996  return res;
5997  }
5998  }
5999 
6000  // Allocate from pBlock. Because it is empty, allocRequest can be trivially filled.
6001  VmaAllocationRequest allocRequest;
6002  pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6003  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6004  pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6005  (*pAllocation)->InitBlockAllocation(
6006  hCurrentPool,
6007  pBlock,
6008  allocRequest.offset,
6009  vkMemReq.alignment,
6010  vkMemReq.size,
6011  suballocType,
6012  mapped,
6013  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
6014  VMA_HEAVY_ASSERT(pBlock->Validate());
6015  VMA_DEBUG_LOG(" Created new block Size=%llu", blockSize);
6016  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
6017  return VK_SUCCESS;
6018  }
6019  }
6020 
6021  const bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
6022 
6023  // 3. Try to allocate from existing blocks with making other allocations lost.
6024  if(canMakeOtherLost)
6025  {
6026  uint32_t tryIndex = 0;
6027  for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6028  {
6029  VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6030  VmaAllocationRequest bestRequest = {};
6031  VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
6032 
6033  // 1. Search existing allocations.
6034  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
6035  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6036  {
6037  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
6038  VMA_ASSERT(pCurrBlock);
6039  VmaAllocationRequest currRequest = {};
6040  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6041  currentFrameIndex,
6042  m_FrameInUseCount,
6043  m_BufferImageGranularity,
6044  vkMemReq.size,
6045  vkMemReq.alignment,
6046  suballocType,
6047  canMakeOtherLost,
6048  &currRequest))
6049  {
6050  const VkDeviceSize currRequestCost = currRequest.CalcCost();
6051  if(pBestRequestBlock == VMA_NULL ||
6052  currRequestCost < bestRequestCost)
6053  {
6054  pBestRequestBlock = pCurrBlock;
6055  bestRequest = currRequest;
6056  bestRequestCost = currRequestCost;
6057 
6058  if(bestRequestCost == 0)
6059  {
6060  break;
6061  }
6062  }
6063  }
6064  }
6065 
6066  if(pBestRequestBlock != VMA_NULL)
6067  {
6068  if(mapped)
6069  {
6070  VkResult res = pBestRequestBlock->Map(m_hAllocator, nullptr);
6071  if(res != VK_SUCCESS)
6072  {
6073  return res;
6074  }
6075  }
6076 
6077  if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6078  currentFrameIndex,
6079  m_FrameInUseCount,
6080  &bestRequest))
6081  {
6082  // We no longer have an empty block.
6083  if(pBestRequestBlock->m_Metadata.IsEmpty())
6084  {
6085  m_HasEmptyBlock = false;
6086  }
6087  // Allocate from this pBlock.
6088  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6089  pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6090  (*pAllocation)->InitBlockAllocation(
6091  hCurrentPool,
6092  pBestRequestBlock,
6093  bestRequest.offset,
6094  vkMemReq.alignment,
6095  vkMemReq.size,
6096  suballocType,
6097  mapped,
6098  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
6099  VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6100  VMA_DEBUG_LOG(" Returned from existing block");
6101  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
6102  return VK_SUCCESS;
6103  }
6104  // else: Some allocations must have been touched while we are here. Next try.
6105  }
6106  else
6107  {
6108  // Could not find place in any of the blocks - break outer loop.
6109  break;
6110  }
6111  }
6112  /* Maximum number of tries exceeded - a very unlikely event that happens when
6113  many other threads are simultaneously touching the allocations we try to make
6114  lost, so they cannot be made lost while we attempt to allocate. */
6115  if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6116  {
6117  return VK_ERROR_TOO_MANY_OBJECTS;
6118  }
6119  }
6120 
6121  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6122 }
6123 
6124 void VmaBlockVector::Free(
6125  VmaAllocation hAllocation)
6126 {
6127  VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6128 
6129  // Scope for lock.
6130  {
6131  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6132 
6133  VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
6134 
6135  if(hAllocation->IsPersistentMap())
6136  {
6137  pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory);
6138  }
6139 
6140  pBlock->m_Metadata.Free(hAllocation);
6141  VMA_HEAVY_ASSERT(pBlock->Validate());
6142 
6143  VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
6144 
6145  // pBlock became empty after this deallocation.
6146  if(pBlock->m_Metadata.IsEmpty())
6147  {
6148  // We already have an empty block. We don't want two, so delete this one.
6149  if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6150  {
6151  pBlockToDelete = pBlock;
6152  Remove(pBlock);
6153  }
6154  // We now have our first empty block.
6155  else
6156  {
6157  m_HasEmptyBlock = true;
6158  }
6159  }
6160  // pBlock didn't become empty, but we have another empty block - find and free that one.
6161  // (This is optional, heuristics.)
6162  else if(m_HasEmptyBlock)
6163  {
6164  VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6165  if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6166  {
6167  pBlockToDelete = pLastBlock;
6168  m_Blocks.pop_back();
6169  m_HasEmptyBlock = false;
6170  }
6171  }
6172 
6173  IncrementallySortBlocks();
6174  }
6175 
6176  // Destruction of an empty block. Deferred until this point, outside of the
6177  // mutex lock, for performance reasons.
6178  if(pBlockToDelete != VMA_NULL)
6179  {
6180  VMA_DEBUG_LOG(" Deleted empty block");
6181  pBlockToDelete->Destroy(m_hAllocator);
6182  vma_delete(m_hAllocator, pBlockToDelete);
6183  }
6184 }
6185 
6186 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6187 {
6188  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6189  {
6190  if(m_Blocks[blockIndex] == pBlock)
6191  {
6192  VmaVectorRemove(m_Blocks, blockIndex);
6193  return;
6194  }
6195  }
6196  VMA_ASSERT(0);
6197 }
6198 
6199 void VmaBlockVector::IncrementallySortBlocks()
6200 {
6201  // Bubble sort only until first swap.
6202  for(size_t i = 1; i < m_Blocks.size(); ++i)
6203  {
6204  if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6205  {
6206  VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
6207  return;
6208  }
6209  }
6210 }
6211 
6212 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
6213 {
6214  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6215  allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6216  allocInfo.allocationSize = blockSize;
6217  VkDeviceMemory mem = VK_NULL_HANDLE;
6218  VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6219  if(res < 0)
6220  {
6221  return res;
6222  }
6223 
6224  // New VkDeviceMemory successfully created.
6225 
6226  // Create a new block object for it.
6227  VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6228  pBlock->Init(
6229  m_MemoryTypeIndex,
6230  mem,
6231  allocInfo.allocationSize);
6232 
6233  m_Blocks.push_back(pBlock);
6234  if(pNewBlockIndex != VMA_NULL)
6235  {
6236  *pNewBlockIndex = m_Blocks.size() - 1;
6237  }
6238 
6239  return VK_SUCCESS;
6240 }
6241 
6242 #if VMA_STATS_STRING_ENABLED
6243 
6244 void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
6245 {
6246  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6247 
6248  json.BeginObject();
6249 
6250  if(m_IsCustomPool)
6251  {
6252  json.WriteString("MemoryTypeIndex");
6253  json.WriteNumber(m_MemoryTypeIndex);
6254 
6255  json.WriteString("BlockSize");
6256  json.WriteNumber(m_PreferredBlockSize);
6257 
6258  json.WriteString("BlockCount");
6259  json.BeginObject(true);
6260  if(m_MinBlockCount > 0)
6261  {
6262  json.WriteString("Min");
6263  json.WriteNumber(m_MinBlockCount);
6264  }
6265  if(m_MaxBlockCount < SIZE_MAX)
6266  {
6267  json.WriteString("Max");
6268  json.WriteNumber(m_MaxBlockCount);
6269  }
6270  json.WriteString("Cur");
6271  json.WriteNumber(m_Blocks.size());
6272  json.EndObject();
6273 
6274  if(m_FrameInUseCount > 0)
6275  {
6276  json.WriteString("FrameInUseCount");
6277  json.WriteNumber(m_FrameInUseCount);
6278  }
6279  }
6280  else
6281  {
6282  json.WriteString("PreferredBlockSize");
6283  json.WriteNumber(m_PreferredBlockSize);
6284  }
6285 
6286  json.WriteString("Blocks");
6287  json.BeginArray();
6288  for(size_t i = 0; i < m_Blocks.size(); ++i)
6289  {
6290  m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
6291  }
6292  json.EndArray();
6293 
6294  json.EndObject();
6295 }
6296 
6297 #endif // #if VMA_STATS_STRING_ENABLED
6298 
6299 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6300  VmaAllocator hAllocator,
6301  uint32_t currentFrameIndex)
6302 {
6303  if(m_pDefragmentator == VMA_NULL)
6304  {
6305  m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6306  hAllocator,
6307  this,
6308  currentFrameIndex);
6309  }
6310 
6311  return m_pDefragmentator;
6312 }
6313 
6314 VkResult VmaBlockVector::Defragment(
6315  VmaDefragmentationStats* pDefragmentationStats,
6316  VkDeviceSize& maxBytesToMove,
6317  uint32_t& maxAllocationsToMove)
6318 {
6319  if(m_pDefragmentator == VMA_NULL)
6320  {
6321  return VK_SUCCESS;
6322  }
6323 
6324  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6325 
6326  // Defragment.
6327  VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
6328 
6329  // Accumulate statistics.
6330  if(pDefragmentationStats != VMA_NULL)
6331  {
6332  const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
6333  const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
6334  pDefragmentationStats->bytesMoved += bytesMoved;
6335  pDefragmentationStats->allocationsMoved += allocationsMoved;
6336  VMA_ASSERT(bytesMoved <= maxBytesToMove);
6337  VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
6338  maxBytesToMove -= bytesMoved;
6339  maxAllocationsToMove -= allocationsMoved;
6340  }
6341 
6342  // Free empty blocks.
6343  m_HasEmptyBlock = false;
6344  for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
6345  {
6346  VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6347  if(pBlock->m_Metadata.IsEmpty())
6348  {
6349  if(m_Blocks.size() > m_MinBlockCount)
6350  {
6351  if(pDefragmentationStats != VMA_NULL)
6352  {
6353  ++pDefragmentationStats->deviceMemoryBlocksFreed;
6354  pDefragmentationStats->bytesFreed += pBlock->m_Metadata.GetSize();
6355  }
6356 
6357  VmaVectorRemove(m_Blocks, blockIndex);
6358  pBlock->Destroy(m_hAllocator);
6359  vma_delete(m_hAllocator, pBlock);
6360  }
6361  else
6362  {
6363  m_HasEmptyBlock = true;
6364  }
6365  }
6366  }
6367 
6368  return result;
6369 }
6370 
6371 void VmaBlockVector::DestroyDefragmentator()
6372 {
6373  if(m_pDefragmentator != VMA_NULL)
6374  {
6375  vma_delete(m_hAllocator, m_pDefragmentator);
6376  m_pDefragmentator = VMA_NULL;
6377  }
6378 }
6379 
6380 void VmaBlockVector::MakePoolAllocationsLost(
6381  uint32_t currentFrameIndex,
6382  size_t* pLostAllocationCount)
6383 {
6384  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6385 
6386  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6387  {
6388  VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
6389  VMA_ASSERT(pBlock);
6390  pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
6391  }
6392 }
6393 
6394 void VmaBlockVector::AddStats(VmaStats* pStats)
6395 {
6396  const uint32_t memTypeIndex = m_MemoryTypeIndex;
6397  const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6398 
6399  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6400 
6401  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6402  {
6403  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
6404  VMA_ASSERT(pBlock);
6405  VMA_HEAVY_ASSERT(pBlock->Validate());
6406  VmaStatInfo allocationStatInfo;
6407  pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6408  VmaAddStatInfo(pStats->total, allocationStatInfo);
6409  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
6410  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
6411  }
6412 }
6413 
6414 ////////////////////////////////////////////////////////////////////////////////
6415 // VmaDefragmentator members definition
6416 
6417 VmaDefragmentator::VmaDefragmentator(
6418  VmaAllocator hAllocator,
6419  VmaBlockVector* pBlockVector,
6420  uint32_t currentFrameIndex) :
6421  m_hAllocator(hAllocator),
6422  m_pBlockVector(pBlockVector),
6423  m_CurrentFrameIndex(currentFrameIndex),
6424  m_BytesMoved(0),
6425  m_AllocationsMoved(0),
6426  m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6427  m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6428 {
6429 }
6430 
6431 VmaDefragmentator::~VmaDefragmentator()
6432 {
6433  for(size_t i = m_Blocks.size(); i--; )
6434  {
6435  vma_delete(m_hAllocator, m_Blocks[i]);
6436  }
6437 }
6438 
6439 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6440 {
6441  AllocationInfo allocInfo;
6442  allocInfo.m_hAllocation = hAlloc;
6443  allocInfo.m_pChanged = pChanged;
6444  m_Allocations.push_back(allocInfo);
6445 }
6446 
6447 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator, void** ppMappedData)
6448 {
6449  // It has already been mapped for defragmentation.
6450  if(m_pMappedDataForDefragmentation)
6451  {
6452  *ppMappedData = m_pMappedDataForDefragmentation;
6453  return VK_SUCCESS;
6454  }
6455 
6456  // It is originally mapped.
6457  if(m_pBlock->m_Mapping.GetMappedData())
6458  {
6459  *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
6460  return VK_SUCCESS;
6461  }
6462 
6463  // Map on first usage.
6464  VkResult res = m_pBlock->Map(hAllocator, &m_pMappedDataForDefragmentation);
6465  *ppMappedData = m_pMappedDataForDefragmentation;
6466  return res;
6467 }
6468 
6469 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6470 {
6471  if(m_pMappedDataForDefragmentation != VMA_NULL)
6472  {
6473  m_pBlock->Unmap(hAllocator);
6474  }
6475 }
6476 
6477 VkResult VmaDefragmentator::DefragmentRound(
6478  VkDeviceSize maxBytesToMove,
6479  uint32_t maxAllocationsToMove)
6480 {
6481  if(m_Blocks.empty())
6482  {
6483  return VK_SUCCESS;
6484  }
6485 
6486  size_t srcBlockIndex = m_Blocks.size() - 1;
6487  size_t srcAllocIndex = SIZE_MAX;
6488  for(;;)
6489  {
6490  // 1. Find next allocation to move.
6491  // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
6492  // 1.2. Then start from last to first m_Allocations - they are sorted from largest to smallest.
6493  while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6494  {
6495  if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6496  {
6497  // Finished: no more allocations to process.
6498  if(srcBlockIndex == 0)
6499  {
6500  return VK_SUCCESS;
6501  }
6502  else
6503  {
6504  --srcBlockIndex;
6505  srcAllocIndex = SIZE_MAX;
6506  }
6507  }
6508  else
6509  {
6510  srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6511  }
6512  }
6513 
6514  BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6515  AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6516 
6517  const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6518  const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6519  const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6520  const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
6521 
6522  // 2. Try to find new place for this allocation in preceding or current block.
6523  for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6524  {
6525  BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6526  VmaAllocationRequest dstAllocRequest;
6527  if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6528  m_CurrentFrameIndex,
6529  m_pBlockVector->GetFrameInUseCount(),
6530  m_pBlockVector->GetBufferImageGranularity(),
6531  size,
6532  alignment,
6533  suballocType,
6534  false, // canMakeOtherLost
6535  &dstAllocRequest) &&
6536  MoveMakesSense(
6537  dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6538  {
6539  VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
6540 
6541  // Reached limit on number of allocations or bytes to move.
6542  if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6543  (m_BytesMoved + size > maxBytesToMove))
6544  {
6545  return VK_INCOMPLETE;
6546  }
6547 
6548  void* pDstMappedData = VMA_NULL;
6549  VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6550  if(res != VK_SUCCESS)
6551  {
6552  return res;
6553  }
6554 
6555  void* pSrcMappedData = VMA_NULL;
6556  res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6557  if(res != VK_SUCCESS)
6558  {
6559  return res;
6560  }
6561 
6562  // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
6563  memcpy(
6564  reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6565  reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6566  static_cast<size_t>(size));
6567 
6568  pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6569  pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6570 
6571  allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6572 
6573  if(allocInfo.m_pChanged != VMA_NULL)
6574  {
6575  *allocInfo.m_pChanged = VK_TRUE;
6576  }
6577 
6578  ++m_AllocationsMoved;
6579  m_BytesMoved += size;
6580 
6581  VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
6582 
6583  break;
6584  }
6585  }
6586 
6587  // If not processed, this allocInfo remains in pSrcBlockInfo->m_Allocations for the next round.
6588 
6589  if(srcAllocIndex > 0)
6590  {
6591  --srcAllocIndex;
6592  }
6593  else
6594  {
6595  if(srcBlockIndex > 0)
6596  {
6597  --srcBlockIndex;
6598  srcAllocIndex = SIZE_MAX;
6599  }
6600  else
6601  {
6602  return VK_SUCCESS;
6603  }
6604  }
6605  }
6606 }
6607 
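// High-level flow of Defragment(): build a BlockInfo per block, sort them by
// m_pBlock pointer so allocations can be bucketed via binary search, distribute
// the registered allocations into their owning blocks, re-sort blocks from most
// "destination" to most "source", run up to two DefragmentRound passes within
// the given byte/allocation budget, then unmap blocks that were mapped only for
// defragmentation.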
6608 VkResult VmaDefragmentator::Defragment(
6609  VkDeviceSize maxBytesToMove,
6610  uint32_t maxAllocationsToMove)
6611 {
6612  if(m_Allocations.empty())
6613  {
6614  return VK_SUCCESS;
6615  }
6616 
6617  // Create block info for each block.
6618  const size_t blockCount = m_pBlockVector->m_Blocks.size();
6619  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6620  {
6621  BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6622  pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6623  m_Blocks.push_back(pBlockInfo);
6624  }
6625 
6626  // Sort them by m_pBlock pointer value.
6627  VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6628 
6629  // Move allocation infos from m_Allocations into the m_Allocations of their owning blocks.
6630  for(size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6631  {
6632  AllocationInfo& allocInfo = m_Allocations[blockIndex];
6633  // Now that we are inside VmaBlockVector::m_Mutex, we can make a final check whether this allocation was lost.
6634  if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6635  {
6636  VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6637  BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6638  if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6639  {
6640  (*it)->m_Allocations.push_back(allocInfo);
6641  }
6642  else
6643  {
6644  VMA_ASSERT(0);
6645  }
6646  }
6647  }
6648  m_Allocations.clear();
6649 
6650  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6651  {
6652  BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6653  pBlockInfo->CalcHasNonMovableAllocations();
6654  pBlockInfo->SortAllocationsBySizeDescecnding();
6655  }
6656 
6657  // Sort m_Blocks this time by the main criterion, from most "destination" to most "source" blocks.
6658  VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
6659 
6660  // Execute defragmentation rounds (the main part).
6661  VkResult result = VK_SUCCESS;
6662  for(size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6663  {
6664  result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
6665  }
6666 
6667  // Unmap blocks that were mapped for defragmentation.
6668  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6669  {
6670  m_Blocks[blockIndex]->Unmap(m_hAllocator);
6671  }
6672 
6673  return result;
6674 }
6675 
6676 bool VmaDefragmentator::MoveMakesSense(
6677  size_t dstBlockIndex, VkDeviceSize dstOffset,
6678  size_t srcBlockIndex, VkDeviceSize srcOffset)
6679 {
6680  if(dstBlockIndex < srcBlockIndex)
6681  {
6682  return true;
6683  }
6684  if(dstBlockIndex > srcBlockIndex)
6685  {
6686  return false;
6687  }
6688  if(dstOffset < srcOffset)
6689  {
6690  return true;
6691  }
6692  return false;
6693 }
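// MoveMakesSense effectively compares (blockIndex, offset) pairs lexicographically:
// a move is worthwhile only if it lands in an earlier block, or earlier within the
// same block, so data always migrates toward the front "destination" blocks.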
6694 
6695 ////////////////////////////////////////////////////////////////////////////////
6696 // VmaAllocator_T
6697 
6698 VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
6699  m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
6700  m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
6701  m_hDevice(pCreateInfo->device),
6702  m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6703  m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6704  *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6705  m_PreferredLargeHeapBlockSize(0),
6706  m_PreferredSmallHeapBlockSize(0),
6707  m_PhysicalDevice(pCreateInfo->physicalDevice),
6708  m_CurrentFrameIndex(0),
6709  m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
6710 {
6711  VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);
6712 
6713  memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
6714  memset(&m_MemProps, 0, sizeof(m_MemProps));
6715  memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
6716 
6717  memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
6718  memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
6719 
6720  for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6721  {
6722  m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6723  }
6724 
6725  if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
6726  {
6727  m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
6728  m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
6729  }
6730 
6731  ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);
6732 
6733  (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6734  (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
6735 
6736  m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
6737  pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
6738  m_PreferredSmallHeapBlockSize = (pCreateInfo->preferredSmallHeapBlockSize != 0) ?
6739  pCreateInfo->preferredSmallHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE);
6740 
6741  if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
6742  {
6743  for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6744  {
6745  const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
6746  if(limit != VK_WHOLE_SIZE)
6747  {
6748  m_HeapSizeLimit[heapIndex] = limit;
6749  if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6750  {
6751  m_MemProps.memoryHeaps[heapIndex].size = limit;
6752  }
6753  }
6754  }
6755  }
6756 
6757  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6758  {
6759  const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6760 
6761  m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
6762  this,
6763  memTypeIndex,
6764  preferredBlockSize,
6765  0,
6766  SIZE_MAX,
6767  GetBufferImageGranularity(),
6768  pCreateInfo->frameInUseCount,
6769  false); // isCustomPool
6770  // No need to call m_pBlockVectors[memTypeIndex]->CreateMinBlocks here,
6771  // because minBlockCount is 0.
6772  m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6773  }
6774 }
6775 
6776 VmaAllocator_T::~VmaAllocator_T()
6777 {
6778  VMA_ASSERT(m_Pools.empty());
6779 
6780  for(size_t i = GetMemoryTypeCount(); i--; )
6781  {
6782  vma_delete(this, m_pDedicatedAllocations[i]);
6783  vma_delete(this, m_pBlockVectors[i]);
6784  }
6785 }
6786 
6787 void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
6788 {
6789 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
6790  m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6791  m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6792  m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6793  m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6794  m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6795  m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6796  m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6797  m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6798  m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6799  m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6800  m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6801  m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6802  m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6803  m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
6804  if(m_UseKhrDedicatedAllocation)
6805  {
6806  m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
6807  (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
6808  m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
6809  (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
6810  }
6811 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
6812 
6813 #define VMA_COPY_IF_NOT_NULL(funcName) \
6814  if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
6815 
6816  if(pVulkanFunctions != VMA_NULL)
6817  {
6818  VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
6819  VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
6820  VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
6821  VMA_COPY_IF_NOT_NULL(vkFreeMemory);
6822  VMA_COPY_IF_NOT_NULL(vkMapMemory);
6823  VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
6824  VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
6825  VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
6826  VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
6827  VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
6828  VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
6829  VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
6830  VMA_COPY_IF_NOT_NULL(vkCreateImage);
6831  VMA_COPY_IF_NOT_NULL(vkDestroyImage);
6832  VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
6833  VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
6834  }
6835 
6836 #undef VMA_COPY_IF_NOT_NULL
6837 
6838  // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
6839  // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
6840  VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6841  VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6842  VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6843  VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6844  VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6845  VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6846  VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6847  VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6848  VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6849  VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6850  VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6851  VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6852  VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6853  VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6854  if(m_UseKhrDedicatedAllocation)
6855  {
6856  VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
6857  VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
6858  }
6859 }
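// Illustrative caller-side setup for when VMA_STATIC_VULKAN_FUNCTIONS is 0 and
// function pointers must be supplied manually (a sketch, assuming `device` is a
// valid VkDevice; any loader such as volk works equally well):
//
//   VmaVulkanFunctions funcs = {};
//   funcs.vkAllocateMemory =
//       (PFN_vkAllocateMemory)vkGetDeviceProcAddr(device, "vkAllocateMemory");
//   funcs.vkFreeMemory =
//       (PFN_vkFreeMemory)vkGetDeviceProcAddr(device, "vkFreeMemory");
//   // ...fill the remaining members checked by the asserts above...
//   VmaAllocatorCreateInfo allocatorInfo = {};
//   allocatorInfo.pVulkanFunctions = &funcs;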
6860 
6861 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6862 {
6863  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6864  const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6865  const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE ||
6866  // HOST_CACHED memory type is treated as small although its heap has the full size of CPU memory, because we usually don't use much of it.
6867  (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0;
6868  return isSmallHeap ? m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
6869 }
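// Example: on an integrated GPU whose single shared heap is at or below
// VMA_SMALL_HEAP_MAX_SIZE, every memory type gets m_PreferredSmallHeapBlockSize;
// a large discrete device-local heap gets m_PreferredLargeHeapBlockSize, except
// for HOST_CACHED types, which are deliberately kept small.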
6870 
6871 VkResult VmaAllocator_T::AllocateMemoryOfType(
6872  const VkMemoryRequirements& vkMemReq,
6873  bool dedicatedAllocation,
6874  VkBuffer dedicatedBuffer,
6875  VkImage dedicatedImage,
6876  const VmaAllocationCreateInfo& createInfo,
6877  uint32_t memTypeIndex,
6878  VmaSuballocationType suballocType,
6879  VmaAllocation* pAllocation)
6880 {
6881  VMA_ASSERT(pAllocation != VMA_NULL);
6882  VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
6883 
6884  VmaAllocationCreateInfo finalCreateInfo = createInfo;
6885 
6886  // If memory type is not HOST_VISIBLE, disable MAPPED.
6887  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
6888  (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
6889  {
6890  finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
6891  }
6892 
6893  VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
6894  VMA_ASSERT(blockVector);
6895 
6896  const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6897  bool preferDedicatedMemory =
6898  VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
6899  dedicatedAllocation ||
6900  // Heuristics: Allocate dedicated memory if requested size is greater than half of preferred block size.
6901  vkMemReq.size > preferredBlockSize / 2;
6902 
6903  if(preferDedicatedMemory &&
6904  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
6905  finalCreateInfo.pool == VK_NULL_HANDLE)
6906  {
6907  finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
6908  }
6909 
6910  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
6911  {
6912  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
6913  {
6914  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6915  }
6916  else
6917  {
6918  return AllocateDedicatedMemory(
6919  vkMemReq.size,
6920  suballocType,
6921  memTypeIndex,
6922  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
6923  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
6924  finalCreateInfo.pUserData,
6925  dedicatedBuffer,
6926  dedicatedImage,
6927  pAllocation);
6928  }
6929  }
6930  else
6931  {
6932  VkResult res = blockVector->Allocate(
6933  VK_NULL_HANDLE, // hCurrentPool
6934  m_CurrentFrameIndex.load(),
6935  vkMemReq,
6936  finalCreateInfo,
6937  suballocType,
6938  pAllocation);
6939  if(res == VK_SUCCESS)
6940  {
6941  return res;
6942  }
6943 
6944  // Try dedicated memory as a fallback.
6945  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
6946  {
6947  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6948  }
6949  else
6950  {
6951  res = AllocateDedicatedMemory(
6952  vkMemReq.size,
6953  suballocType,
6954  memTypeIndex,
6955  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
6956  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
6957  finalCreateInfo.pUserData,
6958  dedicatedBuffer,
6959  dedicatedImage,
6960  pAllocation);
6961  if(res == VK_SUCCESS)
6962  {
6963  // Succeeded: AllocateDedicatedMemory function already filled *pAllocation, nothing more to do here.
6964  VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
6965  return VK_SUCCESS;
6966  }
6967  else
6968  {
6969  // Everything failed: Return error code.
6970  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
6971  return res;
6972  }
6973  }
6974  }
6975 }
6976 
6977 VkResult VmaAllocator_T::AllocateDedicatedMemory(
6978  VkDeviceSize size,
6979  VmaSuballocationType suballocType,
6980  uint32_t memTypeIndex,
6981  bool map,
6982  bool isUserDataString,
6983  void* pUserData,
6984  VkBuffer dedicatedBuffer,
6985  VkImage dedicatedImage,
6986  VmaAllocation* pAllocation)
6987 {
6988  VMA_ASSERT(pAllocation);
6989 
6990  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6991  allocInfo.memoryTypeIndex = memTypeIndex;
6992  allocInfo.allocationSize = size;
6993 
6994  VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
6995  if(m_UseKhrDedicatedAllocation)
6996  {
6997  if(dedicatedBuffer != VK_NULL_HANDLE)
6998  {
6999  VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7000  dedicatedAllocInfo.buffer = dedicatedBuffer;
7001  allocInfo.pNext = &dedicatedAllocInfo;
7002  }
7003  else if(dedicatedImage != VK_NULL_HANDLE)
7004  {
7005  dedicatedAllocInfo.image = dedicatedImage;
7006  allocInfo.pNext = &dedicatedAllocInfo;
7007  }
7008  }
7009 
7010  // Allocate VkDeviceMemory.
7011  VkDeviceMemory hMemory = VK_NULL_HANDLE;
7012  VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7013  if(res < 0)
7014  {
7015  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
7016  return res;
7017  }
7018 
7019  void* pMappedData = nullptr;
7020  if(map)
7021  {
7022  res = (*m_VulkanFunctions.vkMapMemory)(
7023  m_hDevice,
7024  hMemory,
7025  0,
7026  VK_WHOLE_SIZE,
7027  0,
7028  &pMappedData);
7029  if(res < 0)
7030  {
7031  VMA_DEBUG_LOG(" vkMapMemory FAILED");
7032  FreeVulkanMemory(memTypeIndex, size, hMemory);
7033  return res;
7034  }
7035  }
7036 
7037  *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7038  (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7039  (*pAllocation)->SetUserData(this, pUserData);
7040 
7041  // Register it in m_pDedicatedAllocations.
7042  {
7043  VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7044  AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7045  VMA_ASSERT(pDedicatedAllocations);
7046  VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7047  }
7048 
7049  VMA_DEBUG_LOG(" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
7050 
7051  return VK_SUCCESS;
7052 }
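// [Editor's illustrative sketch, not part of the original source.] From the user's
// side, a dedicated VkDeviceMemory block is requested by setting
// VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; `allocator` and the surrounding
// resource-creation code are assumptions:
#if 0
VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
// Passing this to vmaCreateBuffer()/vmaCreateImage() lets the resource handle be
// chained through VkMemoryDedicatedAllocateInfoKHR, as done above.
#endif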
7053 
7054 void VmaAllocator_T::GetBufferMemoryRequirements(
7055  VkBuffer hBuffer,
7056  VkMemoryRequirements& memReq,
7057  bool& requiresDedicatedAllocation,
7058  bool& prefersDedicatedAllocation) const
7059 {
7060  if(m_UseKhrDedicatedAllocation)
7061  {
7062  VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7063  memReqInfo.buffer = hBuffer;
7064 
7065  VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7066 
7067  VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7068  memReq2.pNext = &memDedicatedReq;
7069 
7070  (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7071 
7072  memReq = memReq2.memoryRequirements;
7073  requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7074  prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7075  }
7076  else
7077  {
7078  (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7079  requiresDedicatedAllocation = false;
7080  prefersDedicatedAllocation = false;
7081  }
7082 }
7083 
7084 void VmaAllocator_T::GetImageMemoryRequirements(
7085  VkImage hImage,
7086  VkMemoryRequirements& memReq,
7087  bool& requiresDedicatedAllocation,
7088  bool& prefersDedicatedAllocation) const
7089 {
7090  if(m_UseKhrDedicatedAllocation)
7091  {
7092  VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7093  memReqInfo.image = hImage;
7094 
7095  VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7096 
7097  VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7098  memReq2.pNext = &memDedicatedReq;
7099 
7100  (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7101 
7102  memReq = memReq2.memoryRequirements;
7103  requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7104  prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7105  }
7106  else
7107  {
7108  (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7109  requiresDedicatedAllocation = false;
7110  prefersDedicatedAllocation = false;
7111  }
7112 }
7113 
7114 VkResult VmaAllocator_T::AllocateMemory(
7115  const VkMemoryRequirements& vkMemReq,
7116  bool requiresDedicatedAllocation,
7117  bool prefersDedicatedAllocation,
7118  VkBuffer dedicatedBuffer,
7119  VkImage dedicatedImage,
7120  const VmaAllocationCreateInfo& createInfo,
7121  VmaSuballocationType suballocType,
7122  VmaAllocation* pAllocation)
7123 {
7124  if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
7125  (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
7126  {
7127  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7128  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7129  }
7130  if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
7131  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
7132  {
7133  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7134  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7135  }
7136  if(requiresDedicatedAllocation)
7137  {
7138  if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
7139  {
7140  VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7141  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7142  }
7143  if(createInfo.pool != VK_NULL_HANDLE)
7144  {
7145  VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
7146  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7147  }
7148  }
7149  if((createInfo.pool != VK_NULL_HANDLE) &&
7150  ((createInfo.flags & (VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT)) != 0))
7151  {
7152  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7153  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7154  }
7155 
7156  if(createInfo.pool != VK_NULL_HANDLE)
7157  {
7158  return createInfo.pool->m_BlockVector.Allocate(
7159  createInfo.pool,
7160  m_CurrentFrameIndex.load(),
7161  vkMemReq,
7162  createInfo,
7163  suballocType,
7164  pAllocation);
7165  }
7166  else
7167  {
7168  // Bit mask of Vulkan memory types acceptable for this allocation.
7169  uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7170  uint32_t memTypeIndex = UINT32_MAX;
7171  VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
7172  if(res == VK_SUCCESS)
7173  {
7174  res = AllocateMemoryOfType(
7175  vkMemReq,
7176  requiresDedicatedAllocation || prefersDedicatedAllocation,
7177  dedicatedBuffer,
7178  dedicatedImage,
7179  createInfo,
7180  memTypeIndex,
7181  suballocType,
7182  pAllocation);
7183  // Succeeded on first try.
7184  if(res == VK_SUCCESS)
7185  {
7186  return res;
7187  }
7188  // Allocation from this memory type failed. Try other compatible memory types.
7189  else
7190  {
7191  for(;;)
7192  {
7193  // Remove old memTypeIndex from list of possibilities.
7194  memoryTypeBits &= ~(1u << memTypeIndex);
7195  // Find alternative memTypeIndex.
7196  res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
7197  if(res == VK_SUCCESS)
7198  {
7199  res = AllocateMemoryOfType(
7200  vkMemReq,
7201  requiresDedicatedAllocation || prefersDedicatedAllocation,
7202  dedicatedBuffer,
7203  dedicatedImage,
7204  createInfo,
7205  memTypeIndex,
7206  suballocType,
7207  pAllocation);
7208  // Allocation from this alternative memory type succeeded.
7209  if(res == VK_SUCCESS)
7210  {
7211  return res;
7212  }
7213  // else: Allocation from this memory type failed. Try next one - next loop iteration.
7214  }
7215  // No other matching memory type index could be found.
7216  else
7217  {
7218  // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
7219  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7220  }
7221  }
7222  }
7223  }
7224  // Can't find any single memory type matching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT.
7225  else
7226  return res;
7227  }
7228 }
7229 
7230 void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
7231 {
7232  VMA_ASSERT(allocation);
7233 
7234  if(allocation->CanBecomeLost() == false ||
7235  allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7236  {
7237  switch(allocation->GetType())
7238  {
7239  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7240  {
7241  VmaBlockVector* pBlockVector = VMA_NULL;
7242  VmaPool hPool = allocation->GetPool();
7243  if(hPool != VK_NULL_HANDLE)
7244  {
7245  pBlockVector = &hPool->m_BlockVector;
7246  }
7247  else
7248  {
7249  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7250  pBlockVector = m_pBlockVectors[memTypeIndex];
7251  }
7252  pBlockVector->Free(allocation);
7253  }
7254  break;
7255  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7256  FreeDedicatedMemory(allocation);
7257  break;
7258  default:
7259  VMA_ASSERT(0);
7260  }
7261  }
7262 
7263  allocation->SetUserData(this, VMA_NULL);
7264  vma_delete(this, allocation);
7265 }
7266 
7267 void VmaAllocator_T::CalculateStats(VmaStats* pStats)
7268 {
7269  // Initialize.
7270  InitStatInfo(pStats->total);
7271  for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7272  InitStatInfo(pStats->memoryType[i]);
7273  for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7274  InitStatInfo(pStats->memoryHeap[i]);
7275 
7276  // Process default pools.
7277  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7278  {
7279  VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
7280  VMA_ASSERT(pBlockVector);
7281  pBlockVector->AddStats(pStats);
7282  }
7283 
7284  // Process custom pools.
7285  {
7286  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7287  for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7288  {
7289  m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
7290  }
7291  }
7292 
7293  // Process dedicated allocations.
7294  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7295  {
7296  const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7297  VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7298  AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7299  VMA_ASSERT(pDedicatedAllocVector);
7300  for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7301  {
7302  VmaStatInfo allocationStatInfo;
7303  (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7304  VmaAddStatInfo(pStats->total, allocationStatInfo);
7305  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
7306  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
7307  }
7308  }
7309 
7310  // Postprocess.
7311  VmaPostprocessCalcStatInfo(pStats->total);
7312  for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
7313  VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
7314  for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
7315  VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
7316 }
7317 
7318 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
7319 
7320 VkResult VmaAllocator_T::Defragment(
7321  VmaAllocation* pAllocations,
7322  size_t allocationCount,
7323  VkBool32* pAllocationsChanged,
7324  const VmaDefragmentationInfo* pDefragmentationInfo,
7325  VmaDefragmentationStats* pDefragmentationStats)
7326 {
7327  if(pAllocationsChanged != VMA_NULL)
7328  {
7329  memset(pAllocationsChanged, 0, allocationCount * sizeof(*pAllocationsChanged));
7330  }
7331  if(pDefragmentationStats != VMA_NULL)
7332  {
7333  memset(pDefragmentationStats, 0, sizeof(*pDefragmentationStats));
7334  }
7335 
7336  const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7337 
7338  VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7339 
7340  const size_t poolCount = m_Pools.size();
7341 
7342  // Dispatch pAllocations among defragmentators. Create them in BlockVectors when necessary.
7343  for(size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7344  {
7345  VmaAllocation hAlloc = pAllocations[allocIndex];
7346  VMA_ASSERT(hAlloc);
7347  const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
7348  // DedicatedAlloc cannot be defragmented.
7349  if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7350  // Only HOST_VISIBLE memory types can be defragmented.
7351  ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7352  // Lost allocation cannot be defragmented.
7353  (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7354  {
7355  VmaBlockVector* pAllocBlockVector = nullptr;
7356 
7357  const VmaPool hAllocPool = hAlloc->GetPool();
7358  // This allocation belongs to custom pool.
7359  if(hAllocPool != VK_NULL_HANDLE)
7360  {
7361  pAllocBlockVector = &hAllocPool->GetBlockVector();
7362  }
7363  // This allocation belongs to general pool.
7364  else
7365  {
7366  pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7367  }
7368 
7369  VmaDefragmentator* const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);
7370 
7371  VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
7372  &pAllocationsChanged[allocIndex] : VMA_NULL;
7373  pDefragmentator->AddAllocation(hAlloc, pChanged);
7374  }
7375  }
7376 
7377  VkResult result = VK_SUCCESS;
7378 
7379  // ======== Main processing.
7380 
7381  VkDeviceSize maxBytesToMove = VK_WHOLE_SIZE; // "no limit"; SIZE_MAX would truncate on 32-bit targets.
7382  uint32_t maxAllocationsToMove = UINT32_MAX;
7383  if(pDefragmentationInfo != VMA_NULL)
7384  {
7385  maxBytesToMove = pDefragmentationInfo->maxBytesToMove;
7386  maxAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
7387  }
7388 
7389  // Process standard memory.
7390  for(uint32_t memTypeIndex = 0;
7391  (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7392  ++memTypeIndex)
7393  {
7394  // Only HOST_VISIBLE memory types can be defragmented.
7395  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7396  {
7397  result = m_pBlockVectors[memTypeIndex]->Defragment(
7398  pDefragmentationStats,
7399  maxBytesToMove,
7400  maxAllocationsToMove);
7401  }
7402  }
7403 
7404  // Process custom pools.
7405  for(size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7406  {
7407  result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7408  pDefragmentationStats,
7409  maxBytesToMove,
7410  maxAllocationsToMove);
7411  }
7412 
7413  // ======== Destroy defragmentators.
7414 
7415  // Process custom pools.
7416  for(size_t poolIndex = poolCount; poolIndex--; )
7417  {
7418  m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7419  }
7420 
7421  // Process standard memory.
7422  for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7423  {
7424  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7425  {
7426  m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
7427  }
7428  }
7429 
7430  return result;
7431 }
7432 
7433 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
7434 {
7435  if(hAllocation->CanBecomeLost())
7436  {
7437  /*
7438  Warning: This is a carefully designed algorithm.
7439  Do not modify unless you really know what you're doing :)
7440  */
7441  uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7442  uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7443  for(;;)
7444  {
7445  if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7446  {
7447  pAllocationInfo->memoryType = UINT32_MAX;
7448  pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
7449  pAllocationInfo->offset = 0;
7450  pAllocationInfo->size = hAllocation->GetSize();
7451  pAllocationInfo->pMappedData = VMA_NULL;
7452  pAllocationInfo->pUserData = hAllocation->GetUserData();
7453  return;
7454  }
7455  else if(localLastUseFrameIndex == localCurrFrameIndex)
7456  {
7457  pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
7458  pAllocationInfo->deviceMemory = hAllocation->GetMemory();
7459  pAllocationInfo->offset = hAllocation->GetOffset();
7460  pAllocationInfo->size = hAllocation->GetSize();
7461  pAllocationInfo->pMappedData = VMA_NULL;
7462  pAllocationInfo->pUserData = hAllocation->GetUserData();
7463  return;
7464  }
7465  else // Last use time earlier than current time.
7466  {
7467  if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7468  {
7469  localLastUseFrameIndex = localCurrFrameIndex;
7470  }
7471  }
7472  }
7473  }
7474  else
7475  {
7476  pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
7477  pAllocationInfo->deviceMemory = hAllocation->GetMemory();
7478  pAllocationInfo->offset = hAllocation->GetOffset();
7479  pAllocationInfo->size = hAllocation->GetSize();
7480  pAllocationInfo->pMappedData = hAllocation->GetMappedData();
7481  pAllocationInfo->pUserData = hAllocation->GetUserData();
7482  }
7483 }
7484 
7485 VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7486 {
7487  VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u", pCreateInfo->memoryTypeIndex);
7488 
7489  VmaPoolCreateInfo newCreateInfo = *pCreateInfo;
7490 
7491  if(newCreateInfo.maxBlockCount == 0)
7492  {
7493  newCreateInfo.maxBlockCount = SIZE_MAX;
7494  }
7495  if(newCreateInfo.blockSize == 0)
7496  {
7497  newCreateInfo.blockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
7498  }
7499 
7500  *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo);
7501 
7502  VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7503  if(res != VK_SUCCESS)
7504  {
7505  vma_delete(this, *pPool);
7506  *pPool = VMA_NULL;
7507  return res;
7508  }
7509 
7510  // Add to m_Pools.
7511  {
7512  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7513  VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7514  }
7515 
7516  return VK_SUCCESS;
7517 }
7518 
7519 void VmaAllocator_T::DestroyPool(VmaPool pool)
7520 {
7521  // Remove from m_Pools.
7522  {
7523  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7524  bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7525  VMA_ASSERT(success && "Pool not found in Allocator.");
7526  }
7527 
7528  vma_delete(this, pool);
7529 }
7530 
7531 void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
7532 {
7533  pool->m_BlockVector.GetPoolStats(pPoolStats);
7534 }
7535 
7536 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7537 {
7538  m_CurrentFrameIndex.store(frameIndex);
7539 }
7540 
7541 void VmaAllocator_T::MakePoolAllocationsLost(
7542  VmaPool hPool,
7543  size_t* pLostAllocationCount)
7544 {
7545  hPool->m_BlockVector.MakePoolAllocationsLost(
7546  m_CurrentFrameIndex.load(),
7547  pLostAllocationCount);
7548 }
7549 
7550 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7551 {
7552  *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
7553  (*pAllocation)->InitLost();
7554 }
7555 
7556 VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7557 {
7558  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
7559 
7560  VkResult res;
7561  if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7562  {
7563  VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7564  if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7565  {
7566  res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7567  if(res == VK_SUCCESS)
7568  {
7569  m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
7570  }
7571  }
7572  else
7573  {
7574  res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
7575  }
7576  }
7577  else
7578  {
7579  res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7580  }
7581 
7582  if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
7583  {
7584  (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7585  }
7586 
7587  return res;
7588 }
7589 
7590 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7591 {
7592  if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
7593  {
7594  (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
7595  }
7596 
7597  (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7598 
7599  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7600  if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7601  {
7602  VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7603  m_HeapSizeLimit[heapIndex] += size;
7604  }
7605 }
7606 
7607 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
7608 {
7609  if(hAllocation->CanBecomeLost())
7610  {
7611  return VK_ERROR_MEMORY_MAP_FAILED;
7612  }
7613 
7614  switch(hAllocation->GetType())
7615  {
7616  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7617  {
7618  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
7619  char *pBytes = nullptr;
7620  VkResult res = pBlock->Map(this, (void**)&pBytes);
7621  if(res == VK_SUCCESS)
7622  {
7623  *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
7624  hAllocation->BlockAllocMap();
7625  }
7626  return res;
7627  }
7628  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7629  return hAllocation->DedicatedAllocMap(this, ppData);
7630  default:
7631  VMA_ASSERT(0);
7632  return VK_ERROR_MEMORY_MAP_FAILED;
7633  }
7634 }
7635 
7636 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
7637 {
7638  switch(hAllocation->GetType())
7639  {
7640  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7641  {
7642  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
7643  hAllocation->BlockAllocUnmap();
7644  pBlock->Unmap(this);
7645  }
7646  break;
7647  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7648  hAllocation->DedicatedAllocUnmap(this);
7649  break;
7650  default:
7651  VMA_ASSERT(0);
7652  }
7653 }
7654 
7655 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7656 {
7657  VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7658 
7659  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7660  {
7661  VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7662  AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7663  VMA_ASSERT(pDedicatedAllocations);
7664  bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7665  VMA_ASSERT(success);
7666  }
7667 
7668  VkDeviceMemory hMemory = allocation->GetMemory();
7669 
7670  if(allocation->GetMappedData() != VMA_NULL)
7671  {
7672  (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7673  }
7674 
7675  FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7676 
7677  VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
7678 }
7679 
7680 #if VMA_STATS_STRING_ENABLED
7681 
7682 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7683 {
7684  bool dedicatedAllocationsStarted = false;
7685  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7686  {
7687  VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7688  AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7689  VMA_ASSERT(pDedicatedAllocVector);
7690  if(pDedicatedAllocVector->empty() == false)
7691  {
7692  if(dedicatedAllocationsStarted == false)
7693  {
7694  dedicatedAllocationsStarted = true;
7695  json.WriteString("DedicatedAllocations");
7696  json.BeginObject();
7697  }
7698 
7699  json.BeginString("Type ");
7700  json.ContinueString(memTypeIndex);
7701  json.EndString();
7702 
7703  json.BeginArray();
7704 
7705  for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7706  {
7707  const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7708  json.BeginObject(true);
7709 
7710  json.WriteString("Type");
7711  json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7712 
7713  json.WriteString("Size");
7714  json.WriteNumber(hAlloc->GetSize());
7715 
7716  const void* pUserData = hAlloc->GetUserData();
7717  if(pUserData != VMA_NULL)
7718  {
7719  json.WriteString("UserData");
7720  if(hAlloc->IsUserDataString())
7721  {
7722  json.WriteString((const char*)pUserData);
7723  }
7724  else
7725  {
7726  json.BeginString();
7727  json.ContinueString_Pointer(pUserData);
7728  json.EndString();
7729  }
7730  }
7731 
7732  json.EndObject();
7733  }
7734 
7735  json.EndArray();
7736  }
7737  }
7738  if(dedicatedAllocationsStarted)
7739  {
7740  json.EndObject();
7741  }
7742 
7743  {
7744  bool allocationsStarted = false;
7745  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7746  {
7747  if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
7748  {
7749  if(allocationsStarted == false)
7750  {
7751  allocationsStarted = true;
7752  json.WriteString("DefaultPools");
7753  json.BeginObject();
7754  }
7755 
7756  json.BeginString("Type ");
7757  json.ContinueString(memTypeIndex);
7758  json.EndString();
7759 
7760  m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
7761  }
7762  }
7763  if(allocationsStarted)
7764  {
7765  json.EndObject();
7766  }
7767  }
7768 
7769  {
7770  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7771  const size_t poolCount = m_Pools.size();
7772  if(poolCount > 0)
7773  {
7774  json.WriteString("Pools");
7775  json.BeginArray();
7776  for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7777  {
7778  m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7779  }
7780  json.EndArray();
7781  }
7782  }
7783 }
7784 
7785 #endif // #if VMA_STATS_STRING_ENABLED
7786 
7787 static VkResult AllocateMemoryForImage(
7788  VmaAllocator allocator,
7789  VkImage image,
7790  const VmaAllocationCreateInfo* pAllocationCreateInfo,
7791  VmaSuballocationType suballocType,
7792  VmaAllocation* pAllocation)
7793 {
7794  VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7795 
7796  VkMemoryRequirements vkMemReq = {};
7797  bool requiresDedicatedAllocation = false;
7798  bool prefersDedicatedAllocation = false;
7799  allocator->GetImageMemoryRequirements(image, vkMemReq,
7800  requiresDedicatedAllocation, prefersDedicatedAllocation);
7801 
7802  return allocator->AllocateMemory(
7803  vkMemReq,
7804  requiresDedicatedAllocation,
7805  prefersDedicatedAllocation,
7806  VK_NULL_HANDLE, // dedicatedBuffer
7807  image, // dedicatedImage
7808  *pAllocationCreateInfo,
7809  suballocType,
7810  pAllocation);
7811 }
7812 
7813 ////////////////////////////////////////////////////////////////////////////////
7814 // Public interface
7815 
7816 VkResult vmaCreateAllocator(
7817  const VmaAllocatorCreateInfo* pCreateInfo,
7818  VmaAllocator* pAllocator)
7819 {
7820  VMA_ASSERT(pCreateInfo && pAllocator);
7821  VMA_DEBUG_LOG("vmaCreateAllocator");
7822  *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
7823  return VK_SUCCESS;
7824 }
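// [Editor's illustrative sketch, not part of the original source.] Minimal
// allocator setup, assuming `physicalDevice` and `device` were created earlier:
#if 0
VmaAllocatorCreateInfo allocatorInfo = {};
allocatorInfo.physicalDevice = physicalDevice;
allocatorInfo.device = device;

VmaAllocator allocator = VK_NULL_HANDLE;
VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
// ... create buffers, images, pools ...
vmaDestroyAllocator(allocator);
#endif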
7825 
7826 void vmaDestroyAllocator(
7827  VmaAllocator allocator)
7828 {
7829  if(allocator != VK_NULL_HANDLE)
7830  {
7831  VMA_DEBUG_LOG("vmaDestroyAllocator");
7832  VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7833  vma_delete(&allocationCallbacks, allocator);
7834  }
7835 }
7836 
7837 void vmaGetPhysicalDeviceProperties(
7838  VmaAllocator allocator,
7839  const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7840 {
7841  VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7842  *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7843 }
7844 
7845 void vmaGetMemoryProperties(
7846  VmaAllocator allocator,
7847  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7848 {
7849  VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7850  *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7851 }
7852 
7853 void vmaGetMemoryTypeProperties(
7854  VmaAllocator allocator,
7855  uint32_t memoryTypeIndex,
7856  VkMemoryPropertyFlags* pFlags)
7857 {
7858  VMA_ASSERT(allocator && pFlags);
7859  VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7860  *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7861 }
7862 
7863 void vmaSetCurrentFrameIndex(
7864  VmaAllocator allocator,
7865  uint32_t frameIndex)
7866 {
7867  VMA_ASSERT(allocator);
7868  VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7869 
7870  VMA_DEBUG_GLOBAL_MUTEX_LOCK
7871 
7872  allocator->SetCurrentFrameIndex(frameIndex);
7873 }
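// [Editor's illustrative sketch, not part of the original source.] Advancing the
// frame index once per rendered frame so that lost-capable allocations age
// correctly; `allocator` is an assumption:
#if 0
static uint32_t frameIndex = 0;
frameIndex = (frameIndex + 1) % UINT32_MAX; // UINT32_MAX is reserved as the "lost" marker.
vmaSetCurrentFrameIndex(allocator, frameIndex);
#endif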
7874 
7875 void vmaCalculateStats(
7876  VmaAllocator allocator,
7877  VmaStats* pStats)
7878 {
7879  VMA_ASSERT(allocator && pStats);
7880  VMA_DEBUG_GLOBAL_MUTEX_LOCK
7881  allocator->CalculateStats(pStats);
7882 }
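// [Editor's illustrative sketch, not part of the original source.] Reading the
// aggregate counters filled by CalculateStats() above:
#if 0
VmaStats stats;
vmaCalculateStats(allocator, &stats);
// e.g. stats.total.usedBytes, stats.total.allocationCount, or the per-heap
// entries in stats.memoryHeap[] / per-type entries in stats.memoryType[].
#endif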
7883 
7884 #if VMA_STATS_STRING_ENABLED
7885 
7886 void vmaBuildStatsString(
7887  VmaAllocator allocator,
7888  char** ppStatsString,
7889  VkBool32 detailedMap)
7890 {
7891  VMA_ASSERT(allocator && ppStatsString);
7892  VMA_DEBUG_GLOBAL_MUTEX_LOCK
7893 
7894  VmaStringBuilder sb(allocator);
7895  {
7896  VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7897  json.BeginObject();
7898 
7899  VmaStats stats;
7900  allocator->CalculateStats(&stats);
7901 
7902  json.WriteString("Total");
7903  VmaPrintStatInfo(json, stats.total);
7904 
7905  for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7906  {
7907  json.BeginString("Heap ");
7908  json.ContinueString(heapIndex);
7909  json.EndString();
7910  json.BeginObject();
7911 
7912  json.WriteString("Size");
7913  json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7914 
7915  json.WriteString("Flags");
7916  json.BeginArray(true);
7917  if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7918  {
7919  json.WriteString("DEVICE_LOCAL");
7920  }
7921  json.EndArray();
7922 
7923  if(stats.memoryHeap[heapIndex].blockCount > 0)
7924  {
7925  json.WriteString("Stats");
7926  VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
7927  }
7928 
7929  for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7930  {
7931  if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7932  {
7933  json.BeginString("Type ");
7934  json.ContinueString(typeIndex);
7935  json.EndString();
7936 
7937  json.BeginObject();
7938 
7939  json.WriteString("Flags");
7940  json.BeginArray(true);
7941  VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7942  if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7943  {
7944  json.WriteString("DEVICE_LOCAL");
7945  }
7946  if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7947  {
7948  json.WriteString("HOST_VISIBLE");
7949  }
7950  if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7951  {
7952  json.WriteString("HOST_COHERENT");
7953  }
7954  if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7955  {
7956  json.WriteString("HOST_CACHED");
7957  }
7958  if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7959  {
7960  json.WriteString("LAZILY_ALLOCATED");
7961  }
7962  json.EndArray();
7963 
7964  if(stats.memoryType[typeIndex].blockCount > 0)
7965  {
7966  json.WriteString("Stats");
7967  VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
7968  }
7969 
7970  json.EndObject();
7971  }
7972  }
7973 
7974  json.EndObject();
7975  }
7976  if(detailedMap == VK_TRUE)
7977  {
7978  allocator->PrintDetailedMap(json);
7979  }
7980 
7981  json.EndObject();
7982  }
7983 
7984  const size_t len = sb.GetLength();
7985  char* const pChars = vma_new_array(allocator, char, len + 1);
7986  if(len > 0)
7987  {
7988  memcpy(pChars, sb.GetData(), len);
7989  }
7990  pChars[len] = '\0';
7991  *ppStatsString = pChars;
7992 }
7993 
7994 void vmaFreeStatsString(
7995  VmaAllocator allocator,
7996  char* pStatsString)
7997 {
7998  if(pStatsString != VMA_NULL)
7999  {
8000  VMA_ASSERT(allocator);
8001  size_t len = strlen(pStatsString);
8002  vma_delete_array(allocator, pStatsString, len + 1);
8003  }
8004 }
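// [Editor's illustrative sketch, not part of the original source.] Dumping the
// JSON statistics and releasing the string with the matching free function:
#if 0
char* statsString = VMA_NULL;
vmaBuildStatsString(allocator, &statsString, VK_TRUE /* detailedMap */);
printf("%s\n", statsString); // requires <cstdio>
vmaFreeStatsString(allocator, statsString);
#endif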
8005 
8006 #endif // #if VMA_STATS_STRING_ENABLED
8007 
8008 /*
8009 This function is not protected by any mutex because it just reads immutable data.
8010 */
8011 VkResult vmaFindMemoryTypeIndex(
8012  VmaAllocator allocator,
8013  uint32_t memoryTypeBits,
8014  const VmaAllocationCreateInfo* pAllocationCreateInfo,
8015  uint32_t* pMemoryTypeIndex)
8016 {
8017  VMA_ASSERT(allocator != VK_NULL_HANDLE);
8018  VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8019  VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8020 
8021  if(pAllocationCreateInfo->memoryTypeBits != 0)
8022  {
8023  memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
8024  }
8025 
8026  uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
8027  uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
8028 
8029  // Convert usage to requiredFlags and preferredFlags.
8030  switch(pAllocationCreateInfo->usage)
8031  {
8032  case VMA_MEMORY_USAGE_UNKNOWN:
8033  break;
8034  case VMA_MEMORY_USAGE_GPU_ONLY:
8035  preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8036  break;
8037  case VMA_MEMORY_USAGE_CPU_ONLY:
8038  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8039  break;
8040  case VMA_MEMORY_USAGE_CPU_TO_GPU:
8041  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8042  preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8043  break;
8044  case VMA_MEMORY_USAGE_GPU_TO_CPU:
8045  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8046  preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8047  break;
8048  default:
8049  break;
8050  }
8051 
8052  *pMemoryTypeIndex = UINT32_MAX;
8053  uint32_t minCost = UINT32_MAX;
8054  for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8055  memTypeIndex < allocator->GetMemoryTypeCount();
8056  ++memTypeIndex, memTypeBit <<= 1)
8057  {
8058  // This memory type is acceptable according to memoryTypeBits bitmask.
8059  if((memTypeBit & memoryTypeBits) != 0)
8060  {
8061  const VkMemoryPropertyFlags currFlags =
8062  allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8063  // This memory type contains requiredFlags.
8064  if((requiredFlags & ~currFlags) == 0)
8065  {
8066  // Calculate cost as number of bits from preferredFlags not present in this memory type.
8067  uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8068  // Remember memory type with lowest cost.
8069  if(currCost < minCost)
8070  {
8071  *pMemoryTypeIndex = memTypeIndex;
8072  if(currCost == 0)
8073  {
8074  return VK_SUCCESS;
8075  }
8076  minCost = currCost;
8077  }
8078  }
8079  }
8080  }
8081  return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8082 }
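// [Editor's illustrative sketch, not part of the original source.] Choosing a
// memory type for CPU-side staging data. Passing UINT32_MAX as memoryTypeBits
// considers every type; normally you pass VkMemoryRequirements::memoryTypeBits:
#if 0
VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY; // maps to HOST_VISIBLE | HOST_COHERENT above

uint32_t memTypeIndex = UINT32_MAX;
VkResult res = vmaFindMemoryTypeIndex(allocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
#endif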
8083 
8084 VkResult vmaCreatePool(
8085  VmaAllocator allocator,
8086  const VmaPoolCreateInfo* pCreateInfo,
8087  VmaPool* pPool)
8088 {
8089  VMA_ASSERT(allocator && pCreateInfo && pPool);
8090 
8091  VMA_DEBUG_LOG("vmaCreatePool");
8092 
8093  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8094 
8095  return allocator->CreatePool(pCreateInfo, pPool);
8096 }
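// [Editor's illustrative sketch, not part of the original source.] Creating a
// custom pool for a memory type previously found with vmaFindMemoryTypeIndex();
// the sizes are arbitrary assumptions:
#if 0
VmaPoolCreateInfo poolCreateInfo = {};
poolCreateInfo.memoryTypeIndex = memTypeIndex;
poolCreateInfo.blockSize = 64ull * 1024 * 1024; // 64 MiB per VkDeviceMemory block
poolCreateInfo.maxBlockCount = 4;               // cap the pool at 256 MiB

VmaPool pool = VK_NULL_HANDLE;
VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
// ... allocate with VmaAllocationCreateInfo::pool = pool ...
vmaDestroyPool(allocator, pool);
#endif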
8097 
8098 void vmaDestroyPool(
8099  VmaAllocator allocator,
8100  VmaPool pool)
8101 {
8102  VMA_ASSERT(allocator);
8103 
8104  if(pool == VK_NULL_HANDLE)
8105  {
8106  return;
8107  }
8108 
8109  VMA_DEBUG_LOG("vmaDestroyPool");
8110 
8111  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8112 
8113  allocator->DestroyPool(pool);
8114 }
8115 
8116 void vmaGetPoolStats(
8117  VmaAllocator allocator,
8118  VmaPool pool,
8119  VmaPoolStats* pPoolStats)
8120 {
8121  VMA_ASSERT(allocator && pool && pPoolStats);
8122 
8123  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8124 
8125  allocator->GetPoolStats(pool, pPoolStats);
8126 }
8127 
8128 void vmaMakePoolAllocationsLost(
8129  VmaAllocator allocator,
8130  VmaPool pool,
8131  size_t* pLostAllocationCount)
8132 {
8133  VMA_ASSERT(allocator && pool);
8134 
8135  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8136 
8137  allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8138 }
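// [Editor's illustrative sketch, not part of the original source.] Querying a
// pool and reclaiming unused lost-capable allocations at the start of a frame:
#if 0
VmaPoolStats poolStats = {};
vmaGetPoolStats(allocator, pool, &poolStats);

size_t lostCount = 0;
vmaMakePoolAllocationsLost(allocator, pool, &lostCount);
#endif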
8139 
8140 VkResult vmaAllocateMemory(
8141  VmaAllocator allocator,
8142  const VkMemoryRequirements* pVkMemoryRequirements,
8143  const VmaAllocationCreateInfo* pCreateInfo,
8144  VmaAllocation* pAllocation,
8145  VmaAllocationInfo* pAllocationInfo)
8146 {
8147  VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8148 
8149  VMA_DEBUG_LOG("vmaAllocateMemory");
8150 
8151  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8152 
8153  VkResult result = allocator->AllocateMemory(
8154  *pVkMemoryRequirements,
8155  false, // requiresDedicatedAllocation
8156  false, // prefersDedicatedAllocation
8157  VK_NULL_HANDLE, // dedicatedBuffer
8158  VK_NULL_HANDLE, // dedicatedImage
8159  *pCreateInfo,
8160  VMA_SUBALLOCATION_TYPE_UNKNOWN,
8161  pAllocation);
8162 
8163  if(pAllocationInfo && result == VK_SUCCESS)
8164  {
8165  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8166  }
8167 
8168  return result;
8169 }
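// [Editor's illustrative sketch, not part of the original source.] Allocating
// for requirements queried manually; `device` and `buffer` are assumptions:
#if 0
VkMemoryRequirements memReq;
vkGetBufferMemoryRequirements(device, buffer, &memReq);

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VmaAllocation allocation;
VmaAllocationInfo allocInfo;
VkResult res = vmaAllocateMemory(allocator, &memReq, &allocCreateInfo, &allocation, &allocInfo);
// Prefer vmaAllocateMemoryForBuffer() below, which also queries dedicated-allocation hints.
#endif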
8170 
8171 VkResult vmaAllocateMemoryForBuffer(
8172  VmaAllocator allocator,
8173  VkBuffer buffer,
8174  const VmaAllocationCreateInfo* pCreateInfo,
8175  VmaAllocation* pAllocation,
8176  VmaAllocationInfo* pAllocationInfo)
8177 {
8178  VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8179 
8180  VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
8181 
8182  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8183 
8184  VkMemoryRequirements vkMemReq = {};
8185  bool requiresDedicatedAllocation = false;
8186  bool prefersDedicatedAllocation = false;
8187  allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8188  requiresDedicatedAllocation,
8189  prefersDedicatedAllocation);
8190 
8191  VkResult result = allocator->AllocateMemory(
8192  vkMemReq,
8193  requiresDedicatedAllocation,
8194  prefersDedicatedAllocation,
8195  buffer, // dedicatedBuffer
8196  VK_NULL_HANDLE, // dedicatedImage
8197  *pCreateInfo,
8198  VMA_SUBALLOCATION_TYPE_BUFFER,
8199  pAllocation);
8200 
8201  if(pAllocationInfo && result == VK_SUCCESS)
8202  {
8203  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8204  }
8205 
8206  return result;
8207 }
8208 
8209 VkResult vmaAllocateMemoryForImage(
8210  VmaAllocator allocator,
8211  VkImage image,
8212  const VmaAllocationCreateInfo* pCreateInfo,
8213  VmaAllocation* pAllocation,
8214  VmaAllocationInfo* pAllocationInfo)
8215 {
8216  VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8217 
8218  VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
8219 
8220  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8221 
8222  VkResult result = AllocateMemoryForImage(
8223  allocator,
8224  image,
8225  pCreateInfo,
8226  VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8227  pAllocation);
8228 
8229  if(pAllocationInfo && result == VK_SUCCESS)
8230  {
8231  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8232  }
8233 
8234  return result;
8235 }
8236 
8237 void vmaFreeMemory(
8238  VmaAllocator allocator,
8239  VmaAllocation allocation)
8240 {
8241  VMA_ASSERT(allocator && allocation);
8242 
8243  VMA_DEBUG_LOG("vmaFreeMemory");
8244 
8245  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8246 
8247  allocator->FreeMemory(allocation);
8248 }
8249 
8250 void vmaGetAllocationInfo(
8251  VmaAllocator allocator,
8252  VmaAllocation allocation,
8253  VmaAllocationInfo* pAllocationInfo)
8254 {
8255  VMA_ASSERT(allocator && allocation && pAllocationInfo);
8256 
8257  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8258 
8259  allocator->GetAllocationInfo(allocation, pAllocationInfo);
8260 }
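// [Editor's illustrative sketch, not part of the original source.] Detecting a
// lost allocation from the returned info (the lost path above reports a null
// deviceMemory handle):
#if 0
VmaAllocationInfo info = {};
vmaGetAllocationInfo(allocator, allocation, &info);
if(info.deviceMemory == VK_NULL_HANDLE)
{
    // Allocation was lost: destroy the resource and recreate it.
}
#endif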
8261 
8262 void vmaSetAllocationUserData(
8263  VmaAllocator allocator,
8264  VmaAllocation allocation,
8265  void* pUserData)
8266 {
8267  VMA_ASSERT(allocator && allocation);
8268 
8269  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8270 
8271  allocation->SetUserData(allocator, pUserData);
8272 }
8273 
8274 void vmaCreateLostAllocation(
8275  VmaAllocator allocator,
8276  VmaAllocation* pAllocation)
8277 {
8278  VMA_ASSERT(allocator && pAllocation);
8279 
8280  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8281 
8282  allocator->CreateLostAllocation(pAllocation);
8283 }
8284 
8285 VkResult vmaMapMemory(
8286  VmaAllocator allocator,
8287  VmaAllocation allocation,
8288  void** ppData)
8289 {
8290  VMA_ASSERT(allocator && allocation && ppData);
8291 
8292  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8293 
8294  return allocator->Map(allocation, ppData);
8295 }
8296 
8297 void vmaUnmapMemory(
8298  VmaAllocator allocator,
8299  VmaAllocation allocation)
8300 {
8301  VMA_ASSERT(allocator && allocation);
8302 
8303  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8304 
8305  allocator->Unmap(allocation);
8306 }
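// [Editor's illustrative sketch, not part of the original source.] A
// map/write/unmap round trip on a HOST_VISIBLE allocation (Map() above fails
// for allocations that can become lost); `srcData`/`srcSize` are assumptions:
#if 0
void* mapped = VMA_NULL;
if(vmaMapMemory(allocator, allocation, &mapped) == VK_SUCCESS)
{
    memcpy(mapped, srcData, srcSize);
    vmaUnmapMemory(allocator, allocation);
}
#endif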
8307 
8308 VkResult vmaDefragment(
8309  VmaAllocator allocator,
8310  VmaAllocation* pAllocations,
8311  size_t allocationCount,
8312  VkBool32* pAllocationsChanged,
8313  const VmaDefragmentationInfo *pDefragmentationInfo,
8314  VmaDefragmentationStats* pDefragmentationStats)
8315 {
8316  VMA_ASSERT(allocator && pAllocations);
8317 
8318  VMA_DEBUG_LOG("vmaDefragment");
8319 
8320  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8321 
8322  return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8323 }
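// [Editor's illustrative sketch, not part of the original source.] A bounded
// defragmentation pass over allocations collected by the application
// (`allocations` is an assumed std::vector<VmaAllocation>; requires <vector>):
#if 0
VmaDefragmentationInfo defragInfo = {};
defragInfo.maxBytesToMove = 64ull * 1024 * 1024;
defragInfo.maxAllocationsToMove = 128;

std::vector<VkBool32> changed(allocations.size());
VmaDefragmentationStats defragStats = {};
VkResult res = vmaDefragment(allocator, allocations.data(), allocations.size(),
    changed.data(), &defragInfo, &defragStats);
// Where changed[i] == VK_TRUE, the allocation moved: recreate the buffer/image,
// rebind it, and refresh any views or descriptors pointing at it.
#endif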
8324 
8325 VkResult vmaCreateBuffer(
8326  VmaAllocator allocator,
8327  const VkBufferCreateInfo* pBufferCreateInfo,
8328  const VmaAllocationCreateInfo* pAllocationCreateInfo,
8329  VkBuffer* pBuffer,
8330  VmaAllocation* pAllocation,
8331  VmaAllocationInfo* pAllocationInfo)
8332 {
8333  VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8334 
8335  VMA_DEBUG_LOG("vmaCreateBuffer");
8336 
8337  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8338 
8339  *pBuffer = VK_NULL_HANDLE;
8340  *pAllocation = VK_NULL_HANDLE;
8341 
8342  // 1. Create VkBuffer.
8343  VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8344  allocator->m_hDevice,
8345  pBufferCreateInfo,
8346  allocator->GetAllocationCallbacks(),
8347  pBuffer);
8348  if(res >= 0)
8349  {
8350  // 2. vkGetBufferMemoryRequirements.
8351  VkMemoryRequirements vkMemReq = {};
8352  bool requiresDedicatedAllocation = false;
8353  bool prefersDedicatedAllocation = false;
8354  allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8355  requiresDedicatedAllocation, prefersDedicatedAllocation);
8356 
8357  // Make sure alignment requirements for specific buffer usages reported
8358  // in Physical Device Properties are included in alignment reported by memory requirements.
8359  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8360  {
8361  VMA_ASSERT(vkMemReq.alignment %
8362  allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8363  }
8364  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8365  {
8366  VMA_ASSERT(vkMemReq.alignment %
8367  allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8368  }
8369  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8370  {
8371  VMA_ASSERT(vkMemReq.alignment %
8372  allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8373  }
8374 
8375  // 3. Allocate memory using allocator.
8376  res = allocator->AllocateMemory(
8377  vkMemReq,
8378  requiresDedicatedAllocation,
8379  prefersDedicatedAllocation,
8380  *pBuffer, // dedicatedBuffer
8381  VK_NULL_HANDLE, // dedicatedImage
8382  *pAllocationCreateInfo,
8383  VMA_SUBALLOCATION_TYPE_BUFFER,
8384  pAllocation);
8385  if(res >= 0)
8386  {
8387  // 4. Bind buffer with memory.
8388  res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8389  allocator->m_hDevice,
8390  *pBuffer,
8391  (*pAllocation)->GetMemory(),
8392  (*pAllocation)->GetOffset());
8393  if(res >= 0)
8394  {
8395  // All steps succeeded.
8396  if(pAllocationInfo != VMA_NULL)
8397  {
8398  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8399  }
8400  return VK_SUCCESS;
8401  }
8402  allocator->FreeMemory(*pAllocation);
8403  *pAllocation = VK_NULL_HANDLE;
8404  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8405  *pBuffer = VK_NULL_HANDLE;
8406  return res;
8407  }
8408  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8409  *pBuffer = VK_NULL_HANDLE;
8410  return res;
8411  }
8412  return res;
8413 }
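// [Editor's illustrative sketch, not part of the original source.] One call that
// performs all the steps above: create the VkBuffer, allocate memory, and bind:
#if 0
VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufCreateInfo.size = 65536;
bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VkBuffer buffer;
VmaAllocation allocation;
VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
    &buffer, &allocation, VMA_NULL);
// ...
vmaDestroyBuffer(allocator, buffer, allocation);
#endif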
8414 
8415 void vmaDestroyBuffer(
8416  VmaAllocator allocator,
8417  VkBuffer buffer,
8418  VmaAllocation allocation)
8419 {
8420  if(buffer != VK_NULL_HANDLE)
8421  {
8422  VMA_ASSERT(allocator);
8423 
8424  VMA_DEBUG_LOG("vmaDestroyBuffer");
8425 
8426  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8427 
8428  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8429 
8430  allocator->FreeMemory(allocation);
8431  }
8432 }
8433 
8434 VkResult vmaCreateImage(
8435  VmaAllocator allocator,
8436  const VkImageCreateInfo* pImageCreateInfo,
8437  const VmaAllocationCreateInfo* pAllocationCreateInfo,
8438  VkImage* pImage,
8439  VmaAllocation* pAllocation,
8440  VmaAllocationInfo* pAllocationInfo)
8441 {
8442  VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8443 
8444  VMA_DEBUG_LOG("vmaCreateImage");
8445 
8446  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8447 
8448  *pImage = VK_NULL_HANDLE;
8449  *pAllocation = VK_NULL_HANDLE;
8450 
8451  // 1. Create VkImage.
8452  VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8453  allocator->m_hDevice,
8454  pImageCreateInfo,
8455  allocator->GetAllocationCallbacks(),
8456  pImage);
8457  if(res >= 0)
8458  {
8459  VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8460  VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8461  VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8462 
8463  // 2. Allocate memory using allocator.
8464  res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8465  if(res >= 0)
8466  {
8467  // 3. Bind image with memory.
8468  res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8469  allocator->m_hDevice,
8470  *pImage,
8471  (*pAllocation)->GetMemory(),
8472  (*pAllocation)->GetOffset());
8473  if(res >= 0)
8474  {
8475  // All steps succeeded.
8476  if(pAllocationInfo != VMA_NULL)
8477  {
8478  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8479  }
8480  return VK_SUCCESS;
8481  }
8482  allocator->FreeMemory(*pAllocation);
8483  *pAllocation = VK_NULL_HANDLE;
8484  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8485  *pImage = VK_NULL_HANDLE;
8486  return res;
8487  }
8488  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8489  *pImage = VK_NULL_HANDLE;
8490  return res;
8491  }
8492  return res;
8493 }
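// [Editor's illustrative sketch, not part of the original source.] Creating an
// optimally tiled 2D image; sizes and usage flags are arbitrary assumptions:
#if 0
VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
imgCreateInfo.extent = { 1024, 1024, 1 };
imgCreateInfo.mipLevels = 1;
imgCreateInfo.arrayLayers = 1;
imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // selects VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL above
imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VkImage image;
VmaAllocation allocation;
VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo,
    &image, &allocation, VMA_NULL);
// ...
vmaDestroyImage(allocator, image, allocation);
#endif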
8494 
8495 void vmaDestroyImage(
8496  VmaAllocator allocator,
8497  VkImage image,
8498  VmaAllocation allocation)
8499 {
8500  if(image != VK_NULL_HANDLE)
8501  {
8502  VMA_ASSERT(allocator);
8503 
8504  VMA_DEBUG_LOG("vmaDestroyImage");
8505 
8506  VMA_DEBUG_GLOBAL_MUTEX_LOCK
8507 
8508  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8509 
8510  allocator->FreeMemory(allocation);
8511  }
8512 }
8513 
8514 #endif // #ifdef VMA_IMPLEMENTATION
PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:758
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1005
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:783
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:768
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:968
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:762
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1273
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:780
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1439
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1143
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1197
Definition: vk_mem_alloc.h:1042
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:751
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1080
Definition: vk_mem_alloc.h:989
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:795
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:848
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:777
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MiB...
Definition: vk_mem_alloc.h:792
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:993
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:913
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:765
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:912
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:773
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1443
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:812
VmaStatInfo total
Definition: vk_mem_alloc.h:922
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1451
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1064
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1434
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:766
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:693
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:786
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1151
Definition: vk_mem_alloc.h:1145
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1283
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:763
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1101
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1167
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1203
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:749
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1154
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
VmaMemoryUsage
Definition: vk_mem_alloc.h:950
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1429
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1447
Definition: vk_mem_alloc.h:983
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1088
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:764
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:918
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:699
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:720
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:725
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1449
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1075
VkDeviceSize unusedRangeSizeMax
Size of the largest contiguous free memory region.
Definition: vk_mem_alloc.h:1213
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as a string in JSON format.
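A sketch of dumping the JSON statistics; the returned string is owned by the library and is released here with its companion function vmaFreeStatsString():

    #include <stdio.h>

    char* statsJson = NULL;
    vmaBuildStatsString(allocator, &statsJson, VK_TRUE); // VK_TRUE = detailed map
    printf("%s\n", statsJson);                           // or write to a log file
    vmaFreeStatsString(allocator, statsJson);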
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:759
VmaStatInfo
Calculated statistics of memory usage in the entire allocator.
Definition: vk_mem_alloc.h:901
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1162
VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:712
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
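A hedged sketch of the usual one-call path that creates the buffer, allocates memory for it, and binds the two together; the size and usage flags are placeholders:

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;                           // illustrative size
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
                          VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;    // device-local memory

    VkBuffer buffer = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo,
        &allocCreateInfo, &buffer, &allocation, NULL);
    // ... use the buffer ...
    vmaDestroyBuffer(allocator, buffer, allocation);      // destroys and frees both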
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:914
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:716
VmaPoolCreateFlags flags
Use a combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1157
struct VmaPoolStats VmaPoolStats
Describes the parameters of an existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
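An analogous sketch for images; the extent, format, and usage below are illustrative:

    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage image = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    VkResult res = vmaCreateImage(allocator, &imgCreateInfo,
        &allocCreateInfo, &image, &allocation, NULL);
    // ... use the image ...
    vmaDestroyImage(allocator, image, allocation);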
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1070
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:904
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:761
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1175
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:798
size_t unusedRangeCount
Number of contiguous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1206
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1059
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1094
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:836
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:920
VMA_ALLOCATION_CREATE_MAPPED_BIT
Set this flag to use memory that will be persistently mapped and to retrieve a pointer to it...
Definition: vk_mem_alloc.h:1029
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:913
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:770
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:714
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:769
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps the memory represented by the given allocation and returns a pointer to it.
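A sketch of a map/write/unmap round trip, assuming the allocation lives in host-visible memory (e.g. created with VMA_MEMORY_USAGE_CPU_ONLY) and that srcData/srcSize are the application's staging data:

    #include <string.h>

    void* mapped = NULL;
    if(vmaMapMemory(allocator, allocation, &mapped) == VK_SUCCESS)
    {
        memcpy(mapped, srcData, srcSize);
        vmaUnmapMemory(allocator, allocation);  // the matching unmap call
    }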
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as the current frame.
Definition: vk_mem_alloc.h:1189
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1297
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:789
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:913
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:910
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VmaPoolStats
Describes the parameters of an existing VmaPool.
Definition: vk_mem_alloc.h:1194
VkDeviceSize offset
Offset into the deviceMemory object to the beginning of this allocation, in bytes. The (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1278
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1445
VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:757
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates the Allocator object.
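The minimal creation sketch, assuming physicalDevice and device come from the application's own Vulkan setup; all optional members are left at their zero defaults:

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;

    VmaAllocator allocator = VK_NULL_HANDLE;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
    // ... create buffers, images, pools ...
    vmaDestroyAllocator(allocator);  // after all allocations have been freed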
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:772
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:908
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1147
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation objects allocated.
Definition: vk_mem_alloc.h:906
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:767
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:771
VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1016
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1292
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT
Enables usage of the VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:747
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by the function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:760
VmaAllocationInfo
Parameters of VmaAllocation objects that can be retrieved using the function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1259
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
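A sketch of this low-level path, where the application supplies VkMemoryRequirements itself (here taken from an existing buffer) and binds the result manually via the returned VmaAllocationInfo:

    VkMemoryRequirements memReq = {};
    vkGetBufferMemoryRequirements(device, buffer, &memReq);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocation allocation = VK_NULL_HANDLE;
    VmaAllocationInfo allocInfo = {};
    VkResult res = vmaAllocateMemory(allocator, &memReq,
        &allocCreateInfo, &allocation, &allocInfo);
    if(res == VK_SUCCESS)
        vkBindBufferMemory(device, buffer, allocInfo.deviceMemory, allocInfo.offset);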
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets the index of the current frame.
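A sketch of how a render loop might feed the index that drives lost-allocation accounting together with frameInUseCount; quitRequested stands in for the application's own loop condition:

    uint32_t frameIndex = 0;
    while(!quitRequested)
    {
        vmaSetCurrentFrameIndex(allocator, frameIndex);
        // ... record and submit this frame's work ...
        ++frameIndex;
    }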
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
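A sketch of the for-buffer variant; unlike vmaCreateBuffer(), the application still performs the bind itself using the returned VmaAllocationInfo:

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocation allocation = VK_NULL_HANDLE;
    VmaAllocationInfo allocInfo = {};
    if(vmaAllocateMemoryForBuffer(allocator, buffer,
        &allocCreateInfo, &allocation, &allocInfo) == VK_SUCCESS)
    {
        // Bind at the offset chosen by the allocator.
        vkBindBufferMemory(device, buffer, allocInfo.deviceMemory, allocInfo.offset);
    }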
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1125
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:914
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:921
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1200
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:914
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1264