Update for Vulkan-Docs 1.3.265

Jon Leech 2023-09-23 01:25:56 -07:00 committed by Jon Leech
parent 4f51aac14f
commit df60f03168
7 changed files with 38092 additions and 18520 deletions

View file

@@ -15,6 +15,7 @@ module;
#include <vulkan/vulkan_format_traits.hpp>
#include <vulkan/vulkan_hash.hpp>
#include <vulkan/vulkan_raii.hpp>
#include <vulkan/vulkan_shared.hpp>
export module vulkan_hpp;
@@ -44,6 +45,7 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::ArrayProxy;
using VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries;
using VULKAN_HPP_NAMESPACE::Optional;
using VULKAN_HPP_NAMESPACE::SharedHandle;
using VULKAN_HPP_NAMESPACE::StridedArrayProxy;
using VULKAN_HPP_NAMESPACE::StructureChain;
using VULKAN_HPP_NAMESPACE::UniqueHandle;
@@ -51,9 +53,13 @@ export namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
using VULKAN_HPP_NAMESPACE::ObjectDestroy;
using VULKAN_HPP_NAMESPACE::ObjectDestroyShared;
using VULKAN_HPP_NAMESPACE::ObjectFree;
using VULKAN_HPP_NAMESPACE::ObjectFreeShared;
using VULKAN_HPP_NAMESPACE::ObjectRelease;
using VULKAN_HPP_NAMESPACE::ObjectReleaseShared;
using VULKAN_HPP_NAMESPACE::PoolFree;
using VULKAN_HPP_NAMESPACE::PoolFreeShared;
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
//==================
@@ -940,40 +946,40 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::MaxDriverNameSize;
//=== VK_KHR_device_group_creation ===
using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSizeKhr;
using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSizeKHR;
//=== VK_KHR_external_memory_capabilities ===
using VULKAN_HPP_NAMESPACE::LuidSizeKhr;
using VULKAN_HPP_NAMESPACE::LuidSizeKHR;
//=== VK_KHR_external_memory ===
using VULKAN_HPP_NAMESPACE::QueueFamilyExternalKhr;
using VULKAN_HPP_NAMESPACE::QueueFamilyExternalKHR;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_AMDX_shader_enqueue ===
using VULKAN_HPP_NAMESPACE::ShaderIndexUnusedAmdx;
using VULKAN_HPP_NAMESPACE::ShaderIndexUnusedAMDX;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_KHR_ray_tracing_pipeline ===
using VULKAN_HPP_NAMESPACE::ShaderUnusedKhr;
using VULKAN_HPP_NAMESPACE::ShaderUnusedKHR;
//=== VK_NV_ray_tracing ===
using VULKAN_HPP_NAMESPACE::ShaderUnusedNv;
using VULKAN_HPP_NAMESPACE::ShaderUnusedNV;
//=== VK_KHR_global_priority ===
using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeKhr;
using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeKHR;
//=== VK_KHR_driver_properties ===
using VULKAN_HPP_NAMESPACE::MaxDriverInfoSizeKhr;
using VULKAN_HPP_NAMESPACE::MaxDriverNameSizeKhr;
using VULKAN_HPP_NAMESPACE::MaxDriverInfoSizeKHR;
using VULKAN_HPP_NAMESPACE::MaxDriverNameSizeKHR;
//=== VK_EXT_global_priority_query ===
using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeExt;
using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeEXT;
//=== VK_EXT_image_sliced_view_of_3d ===
using VULKAN_HPP_NAMESPACE::Remaining3DSlicesExt;
using VULKAN_HPP_NAMESPACE::Remaining3DSlicesEXT;
//=== VK_EXT_shader_module_identifier ===
using VULKAN_HPP_NAMESPACE::MaxShaderModuleIdentifierSizeExt;
using VULKAN_HPP_NAMESPACE::MaxShaderModuleIdentifierSizeEXT;
//========================
//=== CONSTEXPR VALUEs ===
@@ -2887,6 +2893,104 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::UniqueShaderEXT;
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
//======================
//=== SHARED HANDLEs ===
//======================
#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
//=== VK_VERSION_1_0 ===
using VULKAN_HPP_NAMESPACE::SharedBuffer;
using VULKAN_HPP_NAMESPACE::SharedBufferView;
using VULKAN_HPP_NAMESPACE::SharedCommandBuffer;
using VULKAN_HPP_NAMESPACE::SharedCommandPool;
using VULKAN_HPP_NAMESPACE::SharedDescriptorPool;
using VULKAN_HPP_NAMESPACE::SharedDescriptorSet;
using VULKAN_HPP_NAMESPACE::SharedDescriptorSetLayout;
using VULKAN_HPP_NAMESPACE::SharedDevice;
using VULKAN_HPP_NAMESPACE::SharedDeviceMemory;
using VULKAN_HPP_NAMESPACE::SharedEvent;
using VULKAN_HPP_NAMESPACE::SharedFence;
using VULKAN_HPP_NAMESPACE::SharedFramebuffer;
using VULKAN_HPP_NAMESPACE::SharedImage;
using VULKAN_HPP_NAMESPACE::SharedImageView;
using VULKAN_HPP_NAMESPACE::SharedInstance;
using VULKAN_HPP_NAMESPACE::SharedPhysicalDevice;
using VULKAN_HPP_NAMESPACE::SharedPipeline;
using VULKAN_HPP_NAMESPACE::SharedPipelineCache;
using VULKAN_HPP_NAMESPACE::SharedPipelineLayout;
using VULKAN_HPP_NAMESPACE::SharedQueryPool;
using VULKAN_HPP_NAMESPACE::SharedQueue;
using VULKAN_HPP_NAMESPACE::SharedRenderPass;
using VULKAN_HPP_NAMESPACE::SharedSampler;
using VULKAN_HPP_NAMESPACE::SharedSemaphore;
using VULKAN_HPP_NAMESPACE::SharedShaderModule;
//=== VK_VERSION_1_1 ===
using VULKAN_HPP_NAMESPACE::SharedDescriptorUpdateTemplate;
using VULKAN_HPP_NAMESPACE::SharedSamplerYcbcrConversion;
//=== VK_VERSION_1_3 ===
using VULKAN_HPP_NAMESPACE::SharedPrivateDataSlot;
//=== VK_KHR_surface ===
using VULKAN_HPP_NAMESPACE::SharedSurfaceKHR;
//=== VK_KHR_swapchain ===
using VULKAN_HPP_NAMESPACE::SharedSwapchainKHR;
//=== VK_KHR_display ===
using VULKAN_HPP_NAMESPACE::SharedDisplayKHR;
using VULKAN_HPP_NAMESPACE::SharedDisplayModeKHR;
//=== VK_EXT_debug_report ===
using VULKAN_HPP_NAMESPACE::SharedDebugReportCallbackEXT;
//=== VK_KHR_video_queue ===
using VULKAN_HPP_NAMESPACE::SharedVideoSessionKHR;
using VULKAN_HPP_NAMESPACE::SharedVideoSessionParametersKHR;
//=== VK_NVX_binary_import ===
using VULKAN_HPP_NAMESPACE::SharedCuFunctionNVX;
using VULKAN_HPP_NAMESPACE::SharedCuModuleNVX;
//=== VK_EXT_debug_utils ===
using VULKAN_HPP_NAMESPACE::SharedDebugUtilsMessengerEXT;
//=== VK_KHR_acceleration_structure ===
using VULKAN_HPP_NAMESPACE::SharedAccelerationStructureKHR;
//=== VK_EXT_validation_cache ===
using VULKAN_HPP_NAMESPACE::SharedValidationCacheEXT;
//=== VK_NV_ray_tracing ===
using VULKAN_HPP_NAMESPACE::SharedAccelerationStructureNV;
//=== VK_INTEL_performance_query ===
using VULKAN_HPP_NAMESPACE::SharedPerformanceConfigurationINTEL;
//=== VK_KHR_deferred_host_operations ===
using VULKAN_HPP_NAMESPACE::SharedDeferredOperationKHR;
//=== VK_NV_device_generated_commands ===
using VULKAN_HPP_NAMESPACE::SharedIndirectCommandsLayoutNV;
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
using VULKAN_HPP_NAMESPACE::SharedBufferCollectionFUCHSIA;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_EXT_opacity_micromap ===
using VULKAN_HPP_NAMESPACE::SharedMicromapEXT;
//=== VK_NV_optical_flow ===
using VULKAN_HPP_NAMESPACE::SharedOpticalFlowSessionNV;
//=== VK_EXT_shader_object ===
using VULKAN_HPP_NAMESPACE::SharedHandleTraits;
using VULKAN_HPP_NAMESPACE::SharedShaderEXT;
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
//===========================
//=== COMMAND Definitions ===
//===========================
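The vulkan.cppm module interface now re-exports the shared-handle machinery added in this release: vk::SharedHandle, the Shared* handle aliases, and the *Shared deleters. A minimal consumer sketch, assuming a toolchain with C++20 module support, that VULKAN_HPP_NO_SMART_HANDLE is not defined, and that vk:: is the default VULKAN_HPP_NAMESPACE; makeInstance() is a hypothetical helper standing in for however the application creates its vk::Instance:

import vulkan_hpp;

// Hypothetical helper: the application's own vk::Instance creation lives elsewhere.
vk::Instance makeInstance();

void example()
{
  // SharedInstance takes ownership; copies share it via an atomic reference count.
  vk::SharedInstance shared{ makeInstance() };
  vk::SharedInstance alias = shared;  // bumps the reference count
}  // last owner releases and destroys the VkInstance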

View file

@@ -55,7 +55,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
# include <span>
#endif
static_assert( VK_HEADER_VERSION == 264, "Wrong VK_HEADER_VERSION!" );
static_assert( VK_HEADER_VERSION == 265, "Wrong VK_HEADER_VERSION!" );
// <tuple> includes <sys/sysmacros.h> through some other header
// this results in major(x) being resolved to gnu_dev_major(x)
@@ -6588,40 +6588,40 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSize = VK_MAX_DRIVER_INFO_SIZE;
//=== VK_KHR_device_group_creation ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSizeKhr = VK_MAX_DEVICE_GROUP_SIZE_KHR;
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSizeKHR = VK_MAX_DEVICE_GROUP_SIZE_KHR;
//=== VK_KHR_external_memory_capabilities ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSizeKhr = VK_LUID_SIZE_KHR;
VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSizeKHR = VK_LUID_SIZE_KHR;
//=== VK_KHR_external_memory ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternalKhr = VK_QUEUE_FAMILY_EXTERNAL_KHR;
VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternalKHR = VK_QUEUE_FAMILY_EXTERNAL_KHR;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_AMDX_shader_enqueue ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderIndexUnusedAmdx = VK_SHADER_INDEX_UNUSED_AMDX;
VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderIndexUnusedAMDX = VK_SHADER_INDEX_UNUSED_AMDX;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_KHR_ray_tracing_pipeline ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedKhr = VK_SHADER_UNUSED_KHR;
VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedKHR = VK_SHADER_UNUSED_KHR;
//=== VK_NV_ray_tracing ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedNv = VK_SHADER_UNUSED_NV;
VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedNV = VK_SHADER_UNUSED_NV;
//=== VK_KHR_global_priority ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeKhr = VK_MAX_GLOBAL_PRIORITY_SIZE_KHR;
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeKHR = VK_MAX_GLOBAL_PRIORITY_SIZE_KHR;
//=== VK_KHR_driver_properties ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSizeKhr = VK_MAX_DRIVER_NAME_SIZE_KHR;
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSizeKhr = VK_MAX_DRIVER_INFO_SIZE_KHR;
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSizeKHR = VK_MAX_DRIVER_NAME_SIZE_KHR;
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSizeKHR = VK_MAX_DRIVER_INFO_SIZE_KHR;
//=== VK_EXT_global_priority_query ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeExt = VK_MAX_GLOBAL_PRIORITY_SIZE_EXT;
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeEXT = VK_MAX_GLOBAL_PRIORITY_SIZE_EXT;
//=== VK_EXT_image_sliced_view_of_3d ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t Remaining3DSlicesExt = VK_REMAINING_3D_SLICES_EXT;
VULKAN_HPP_CONSTEXPR_INLINE uint32_t Remaining3DSlicesEXT = VK_REMAINING_3D_SLICES_EXT;
//=== VK_EXT_shader_module_identifier ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxShaderModuleIdentifierSizeExt = VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT;
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxShaderModuleIdentifierSizeEXT = VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT;
//========================
//=== CONSTEXPR VALUEs ===
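The renames above give the constexpr constants the same upper-case vendor suffixes as the underlying C macros. A small sketch using the new spellings (hedged; it relies only on the definitions visible in this hunk):

#include <vulkan/vulkan.hpp>

// The renamed constants keep their values; only the casing of the suffix changed.
static_assert( vk::LuidSizeKHR == VK_LUID_SIZE_KHR, "constant mismatch" );
static_assert( vk::ShaderUnusedKHR == VK_SHADER_UNUSED_KHR, "constant mismatch" );
static_assert( vk::MaxDeviceGroupSizeKHR == VK_MAX_DEVICE_GROUP_SIZE_KHR, "constant mismatch" );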

View file

@@ -69,7 +69,7 @@ extern "C" {
#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0
// Version of this file
#define VK_HEADER_VERSION 264
#define VK_HEADER_VERSION 265
// Complete version of this file
#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION)

View file

@@ -0,0 +1,988 @@
// Copyright 2015-2023 The Khronos Group Inc.
//
// SPDX-License-Identifier: Apache-2.0 OR MIT
//
// This header is generated from the Khronos Vulkan XML API Registry.
#ifndef VULKAN_SHARED_HPP
#define VULKAN_SHARED_HPP
#include <atomic> // std::atomic_size_t
#include <vulkan/vulkan.hpp>
namespace VULKAN_HPP_NAMESPACE
{
#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
template <typename HandleType>
class SharedHandleTraits;
class NoDestructor
{
};
template <typename HandleType, typename = void>
struct HasDestructorType : std::false_type
{
};
template <typename HandleType>
struct HasDestructorType<HandleType, decltype( (void)typename SharedHandleTraits<HandleType>::DestructorType() )> : std::true_type
{
};
template <typename HandleType, typename Enable = void>
struct GetDestructorType
{
using type = NoDestructor;
};
template <typename HandleType>
struct GetDestructorType<HandleType, typename std::enable_if<HasDestructorType<HandleType>::value>::type>
{
using type = typename SharedHandleTraits<HandleType>::DestructorType;
};
template <class HandleType>
using DestructorTypeOf = typename GetDestructorType<HandleType>::type;
template <class HandleType>
struct HasDestructor : std::integral_constant<bool, !std::is_same<DestructorTypeOf<HandleType>, NoDestructor>::value>
{
};
//=====================================================================================================================
template <typename HandleType>
class SharedHandle;
template <typename DestructorType, typename Deleter>
struct SharedHeader
{
SharedHeader( SharedHandle<DestructorType> parent, Deleter deleter = Deleter() ) VULKAN_HPP_NOEXCEPT
: parent( std::move( parent ) )
, deleter( std::move( deleter ) )
{
}
SharedHandle<DestructorType> parent;
Deleter deleter;
};
template <typename Deleter>
struct SharedHeader<NoDestructor, Deleter>
{
SharedHeader( Deleter deleter = Deleter() ) VULKAN_HPP_NOEXCEPT : deleter( std::move( deleter ) ) {}
Deleter deleter;
};
//=====================================================================================================================
template <typename HeaderType>
class ReferenceCounter
{
public:
template <typename... Args>
ReferenceCounter( Args &&... control_args ) : m_header( std::forward<Args>( control_args )... )
{
}
ReferenceCounter( const ReferenceCounter & ) = delete;
ReferenceCounter & operator=( const ReferenceCounter & ) = delete;
public:
size_t addRef() VULKAN_HPP_NOEXCEPT
{
// Relaxed memory order is sufficient since this does not impose any ordering on other operations
return m_ref_cnt.fetch_add( 1, std::memory_order_relaxed );
}
size_t release() VULKAN_HPP_NOEXCEPT
{
// A release memory order to ensure that all releases are ordered
return m_ref_cnt.fetch_sub( 1, std::memory_order_release );
}
public:
std::atomic_size_t m_ref_cnt{ 1 };
HeaderType m_header{};
};
//=====================================================================================================================
template <typename HandleType, typename HeaderType, typename ForwardType = SharedHandle<HandleType>>
class SharedHandleBase
{
public:
SharedHandleBase() = default;
template <typename... Args>
SharedHandleBase( HandleType handle, Args &&... control_args )
: m_control( new ReferenceCounter<HeaderType>( std::forward<Args>( control_args )... ) ), m_handle( handle )
{
}
SharedHandleBase( const SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT
{
o.addRef();
m_handle = o.m_handle;
m_control = o.m_control;
}
SharedHandleBase( SharedHandleBase && o ) VULKAN_HPP_NOEXCEPT
: m_control( o.m_control )
, m_handle( o.m_handle )
{
o.m_handle = nullptr;
o.m_control = nullptr;
}
SharedHandleBase & operator=( const SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT
{
SharedHandleBase( o ).swap( *this );
return *this;
}
SharedHandleBase & operator=( SharedHandleBase && o ) VULKAN_HPP_NOEXCEPT
{
SharedHandleBase( std::move( o ) ).swap( *this );
return *this;
}
~SharedHandleBase()
{
// only this function owns the last reference to the control block
// the same principle is used in the default deleter of std::shared_ptr
if ( m_control && ( m_control->release() == 1 ) )
{
// noop in x86, but does thread synchronization in ARM
// it is required to ensure that last thread is getting to destroy the control block
// by ordering all atomic operations before this fence
std::atomic_thread_fence( std::memory_order_acquire );
ForwardType::internalDestroy( getHeader(), m_handle );
delete m_control;
}
}
public:
HandleType get() const VULKAN_HPP_NOEXCEPT
{
return m_handle;
}
HandleType operator*() const VULKAN_HPP_NOEXCEPT
{
return m_handle;
}
explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
return bool( m_handle );
}
const HandleType * operator->() const VULKAN_HPP_NOEXCEPT
{
return &m_handle;
}
HandleType * operator->() VULKAN_HPP_NOEXCEPT
{
return &m_handle;
}
void reset() VULKAN_HPP_NOEXCEPT
{
SharedHandleBase().swap( *this );
}
void swap( SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_handle, o.m_handle );
std::swap( m_control, o.m_control );
}
template <typename T = HandleType>
typename std::enable_if<HasDestructor<T>::value, const SharedHandle<DestructorTypeOf<HandleType>> &>::type getDestructorType() const VULKAN_HPP_NOEXCEPT
{
return getHeader().parent;
}
protected:
template <typename T = HandleType>
static typename std::enable_if<!HasDestructor<T>::value, void>::type internalDestroy( const HeaderType & control, HandleType handle ) VULKAN_HPP_NOEXCEPT
{
control.deleter.destroy( handle );
}
template <typename T = HandleType>
static typename std::enable_if<HasDestructor<T>::value, void>::type internalDestroy( const HeaderType & control, HandleType handle ) VULKAN_HPP_NOEXCEPT
{
control.deleter.destroy( control.parent.get(), handle );
}
const HeaderType & getHeader() const VULKAN_HPP_NOEXCEPT
{
return m_control->m_header;
}
private:
void addRef() const VULKAN_HPP_NOEXCEPT
{
if ( m_control )
m_control->addRef();
}
protected:
ReferenceCounter<HeaderType> * m_control = nullptr;
HandleType m_handle{};
};
template <typename HandleType>
class SharedHandle : public SharedHandleBase<HandleType, SharedHeader<DestructorTypeOf<HandleType>, typename SharedHandleTraits<HandleType>::deleter>>
{
private:
using BaseType = SharedHandleBase<HandleType, SharedHeader<DestructorTypeOf<HandleType>, typename SharedHandleTraits<HandleType>::deleter>>;
using DeleterType = typename SharedHandleTraits<HandleType>::deleter;
friend BaseType;
public:
SharedHandle() = default;
template <typename T = HandleType, typename = typename std::enable_if<HasDestructor<T>::value>::type>
explicit SharedHandle( HandleType handle, SharedHandle<DestructorTypeOf<HandleType>> parent, DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT
: BaseType( handle, std::move( parent ), std::move( deleter ) )
{
}
template <typename T = HandleType, typename = typename std::enable_if<!HasDestructor<T>::value>::type>
explicit SharedHandle( HandleType handle, DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT : BaseType( handle, std::move( deleter ) )
{
}
protected:
using BaseType::internalDestroy;
};
template <typename HandleType>
class SharedHandleTraits;
// Silence the function cast warnings.
# if defined( __GNUC__ ) && !defined( __clang__ ) && !defined( __INTEL_COMPILER )
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wcast-function-type"
# endif
template <typename HandleType>
class ObjectDestroyShared
{
public:
using DestructorType = typename SharedHandleTraits<HandleType>::DestructorType;
template <class Dispatcher>
using DestroyFunctionPointerType =
typename std::conditional<HasDestructor<HandleType>::value,
void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const,
void ( HandleType::* )( const AllocationCallbacks *, const Dispatcher & ) const>::type;
using SelectorType = typename std::conditional<HasDestructor<HandleType>::value, DestructorType, HandleType>::type;
template <typename Dispatcher = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
ObjectDestroyShared( Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT )
: m_destroy( reinterpret_cast<decltype( m_destroy )>( static_cast<DestroyFunctionPointerType<Dispatcher>>( &SelectorType::destroy ) ) )
, m_dispatch( &dispatch )
, m_allocationCallbacks( allocationCallbacks )
{
}
public:
template <typename T = HandleType>
typename std::enable_if<HasDestructor<T>::value, void>::type destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( m_destroy && m_dispatch );
( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch );
}
template <typename T = HandleType>
typename std::enable_if<!HasDestructor<T>::value, void>::type destroy( HandleType handle ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( m_destroy && m_dispatch );
( handle.*m_destroy )( m_allocationCallbacks, *m_dispatch );
}
private:
DestroyFunctionPointerType<DispatchLoaderBase> m_destroy = nullptr;
const DispatchLoaderBase * m_dispatch = nullptr;
Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
};
template <typename HandleType>
class ObjectFreeShared
{
public:
using DestructorType = typename SharedHandleTraits<HandleType>::DestructorType;
template <class Dispatcher>
using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const;
template <class Dispatcher = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
ObjectFreeShared( Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT )
: m_destroy( reinterpret_cast<decltype( m_destroy )>( static_cast<DestroyFunctionPointerType<Dispatcher>>( &DestructorType::free ) ) )
, m_dispatch( &dispatch )
, m_allocationCallbacks( allocationCallbacks )
{
}
public:
void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( m_destroy && m_dispatch );
( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch );
}
private:
DestroyFunctionPointerType<DispatchLoaderBase> m_destroy = nullptr;
const DispatchLoaderBase * m_dispatch = nullptr;
Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
};
template <typename HandleType>
class ObjectReleaseShared
{
public:
using DestructorType = typename SharedHandleTraits<HandleType>::DestructorType;
template <class Dispatcher>
using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const Dispatcher & ) const;
template <class Dispatcher = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
ObjectReleaseShared( const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT )
: m_destroy( reinterpret_cast<decltype( m_destroy )>( static_cast<DestroyFunctionPointerType<Dispatcher>>( &DestructorType::release ) ) )
, m_dispatch( &dispatch )
{
}
public:
void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( m_destroy && m_dispatch );
( parent.*m_destroy )( handle, *m_dispatch );
}
private:
DestroyFunctionPointerType<DispatchLoaderBase> m_destroy = nullptr;
const DispatchLoaderBase * m_dispatch = nullptr;
};
template <typename HandleType, typename PoolType>
class PoolFreeShared
{
public:
using DestructorType = typename SharedHandleTraits<HandleType>::DestructorType;
template <class Dispatcher>
using ReturnType = decltype( std::declval<DestructorType>().free( PoolType(), 0u, nullptr, Dispatcher() ) );
template <class Dispatcher>
using DestroyFunctionPointerType = ReturnType<Dispatcher> ( DestructorType::* )( PoolType, uint32_t, const HandleType *, const Dispatcher & ) const;
PoolFreeShared() = default;
template <class Dispatcher = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
PoolFreeShared( SharedHandle<PoolType> pool, const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT )
: m_destroy( reinterpret_cast<decltype( m_destroy )>( static_cast<DestroyFunctionPointerType<Dispatcher>>( &DestructorType::free ) ) )
, m_dispatch( &dispatch )
, m_pool( std::move( pool ) )
{
}
public:
void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( m_destroy && m_dispatch );
( parent.*m_destroy )( m_pool.get(), 1u, &handle, *m_dispatch );
}
private:
DestroyFunctionPointerType<DispatchLoaderBase> m_destroy = nullptr;
const DispatchLoaderBase * m_dispatch = nullptr;
SharedHandle<PoolType> m_pool{};
};
# if defined( __GNUC__ ) && !defined( __clang__ ) && !defined( __INTEL_COMPILER )
# pragma GCC diagnostic pop
# endif
//======================
//=== SHARED HANDLEs ===
//======================
//=== VK_VERSION_1_0 ===
template <>
class SharedHandleTraits<Instance>
{
public:
using DestructorType = NoDestructor;
using deleter = ObjectDestroyShared<Instance>;
};
using SharedInstance = SharedHandle<Instance>;
template <>
class SharedHandleTraits<Device>
{
public:
using DestructorType = NoDestructor;
using deleter = ObjectDestroyShared<Device>;
};
using SharedDevice = SharedHandle<Device>;
template <>
class SharedHandleTraits<DeviceMemory>
{
public:
using DestructorType = Device;
using deleter = ObjectFreeShared<DeviceMemory>;
};
using SharedDeviceMemory = SharedHandle<DeviceMemory>;
template <>
class SharedHandleTraits<Fence>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<Fence>;
};
using SharedFence = SharedHandle<Fence>;
template <>
class SharedHandleTraits<Semaphore>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<Semaphore>;
};
using SharedSemaphore = SharedHandle<Semaphore>;
template <>
class SharedHandleTraits<Event>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<Event>;
};
using SharedEvent = SharedHandle<Event>;
template <>
class SharedHandleTraits<QueryPool>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<QueryPool>;
};
using SharedQueryPool = SharedHandle<QueryPool>;
template <>
class SharedHandleTraits<Buffer>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<Buffer>;
};
using SharedBuffer = SharedHandle<Buffer>;
template <>
class SharedHandleTraits<BufferView>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<BufferView>;
};
using SharedBufferView = SharedHandle<BufferView>;
template <>
class SharedHandleTraits<Image>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<Image>;
};
using SharedImage = SharedHandle<Image>;
template <>
class SharedHandleTraits<ImageView>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<ImageView>;
};
using SharedImageView = SharedHandle<ImageView>;
template <>
class SharedHandleTraits<ShaderModule>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<ShaderModule>;
};
using SharedShaderModule = SharedHandle<ShaderModule>;
template <>
class SharedHandleTraits<PipelineCache>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<PipelineCache>;
};
using SharedPipelineCache = SharedHandle<PipelineCache>;
template <>
class SharedHandleTraits<Pipeline>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<Pipeline>;
};
using SharedPipeline = SharedHandle<Pipeline>;
template <>
class SharedHandleTraits<PipelineLayout>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<PipelineLayout>;
};
using SharedPipelineLayout = SharedHandle<PipelineLayout>;
template <>
class SharedHandleTraits<Sampler>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<Sampler>;
};
using SharedSampler = SharedHandle<Sampler>;
template <>
class SharedHandleTraits<DescriptorPool>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<DescriptorPool>;
};
using SharedDescriptorPool = SharedHandle<DescriptorPool>;
template <>
class SharedHandleTraits<DescriptorSet>
{
public:
using DestructorType = Device;
using deleter = PoolFreeShared<DescriptorSet, DescriptorPool>;
};
using SharedDescriptorSet = SharedHandle<DescriptorSet>;
template <>
class SharedHandleTraits<DescriptorSetLayout>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<DescriptorSetLayout>;
};
using SharedDescriptorSetLayout = SharedHandle<DescriptorSetLayout>;
template <>
class SharedHandleTraits<Framebuffer>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<Framebuffer>;
};
using SharedFramebuffer = SharedHandle<Framebuffer>;
template <>
class SharedHandleTraits<RenderPass>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<RenderPass>;
};
using SharedRenderPass = SharedHandle<RenderPass>;
template <>
class SharedHandleTraits<CommandPool>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<CommandPool>;
};
using SharedCommandPool = SharedHandle<CommandPool>;
template <>
class SharedHandleTraits<CommandBuffer>
{
public:
using DestructorType = Device;
using deleter = PoolFreeShared<CommandBuffer, CommandPool>;
};
using SharedCommandBuffer = SharedHandle<CommandBuffer>;
//=== VK_VERSION_1_1 ===
template <>
class SharedHandleTraits<SamplerYcbcrConversion>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<SamplerYcbcrConversion>;
};
using SharedSamplerYcbcrConversion = SharedHandle<SamplerYcbcrConversion>;
using SharedSamplerYcbcrConversionKHR = SharedHandle<SamplerYcbcrConversion>;
template <>
class SharedHandleTraits<DescriptorUpdateTemplate>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<DescriptorUpdateTemplate>;
};
using SharedDescriptorUpdateTemplate = SharedHandle<DescriptorUpdateTemplate>;
using SharedDescriptorUpdateTemplateKHR = SharedHandle<DescriptorUpdateTemplate>;
//=== VK_VERSION_1_3 ===
template <>
class SharedHandleTraits<PrivateDataSlot>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<PrivateDataSlot>;
};
using SharedPrivateDataSlot = SharedHandle<PrivateDataSlot>;
using SharedPrivateDataSlotEXT = SharedHandle<PrivateDataSlot>;
//=== VK_KHR_surface ===
template <>
class SharedHandleTraits<SurfaceKHR>
{
public:
using DestructorType = Instance;
using deleter = ObjectDestroyShared<SurfaceKHR>;
};
using SharedSurfaceKHR = SharedHandle<SurfaceKHR>;
//=== VK_KHR_swapchain ===
template <>
class SharedHandleTraits<SwapchainKHR>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<SwapchainKHR>;
};
using SharedSwapchainKHR = SharedHandle<SwapchainKHR>;
//=== VK_EXT_debug_report ===
template <>
class SharedHandleTraits<DebugReportCallbackEXT>
{
public:
using DestructorType = Instance;
using deleter = ObjectDestroyShared<DebugReportCallbackEXT>;
};
using SharedDebugReportCallbackEXT = SharedHandle<DebugReportCallbackEXT>;
//=== VK_KHR_video_queue ===
template <>
class SharedHandleTraits<VideoSessionKHR>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<VideoSessionKHR>;
};
using SharedVideoSessionKHR = SharedHandle<VideoSessionKHR>;
template <>
class SharedHandleTraits<VideoSessionParametersKHR>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<VideoSessionParametersKHR>;
};
using SharedVideoSessionParametersKHR = SharedHandle<VideoSessionParametersKHR>;
//=== VK_NVX_binary_import ===
template <>
class SharedHandleTraits<CuModuleNVX>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<CuModuleNVX>;
};
using SharedCuModuleNVX = SharedHandle<CuModuleNVX>;
template <>
class SharedHandleTraits<CuFunctionNVX>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<CuFunctionNVX>;
};
using SharedCuFunctionNVX = SharedHandle<CuFunctionNVX>;
//=== VK_EXT_debug_utils ===
template <>
class SharedHandleTraits<DebugUtilsMessengerEXT>
{
public:
using DestructorType = Instance;
using deleter = ObjectDestroyShared<DebugUtilsMessengerEXT>;
};
using SharedDebugUtilsMessengerEXT = SharedHandle<DebugUtilsMessengerEXT>;
//=== VK_KHR_acceleration_structure ===
template <>
class SharedHandleTraits<AccelerationStructureKHR>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<AccelerationStructureKHR>;
};
using SharedAccelerationStructureKHR = SharedHandle<AccelerationStructureKHR>;
//=== VK_EXT_validation_cache ===
template <>
class SharedHandleTraits<ValidationCacheEXT>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<ValidationCacheEXT>;
};
using SharedValidationCacheEXT = SharedHandle<ValidationCacheEXT>;
//=== VK_NV_ray_tracing ===
template <>
class SharedHandleTraits<AccelerationStructureNV>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<AccelerationStructureNV>;
};
using SharedAccelerationStructureNV = SharedHandle<AccelerationStructureNV>;
//=== VK_KHR_deferred_host_operations ===
template <>
class SharedHandleTraits<DeferredOperationKHR>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<DeferredOperationKHR>;
};
using SharedDeferredOperationKHR = SharedHandle<DeferredOperationKHR>;
//=== VK_NV_device_generated_commands ===
template <>
class SharedHandleTraits<IndirectCommandsLayoutNV>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<IndirectCommandsLayoutNV>;
};
using SharedIndirectCommandsLayoutNV = SharedHandle<IndirectCommandsLayoutNV>;
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
template <>
class SharedHandleTraits<BufferCollectionFUCHSIA>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<BufferCollectionFUCHSIA>;
};
using SharedBufferCollectionFUCHSIA = SharedHandle<BufferCollectionFUCHSIA>;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_EXT_opacity_micromap ===
template <>
class SharedHandleTraits<MicromapEXT>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<MicromapEXT>;
};
using SharedMicromapEXT = SharedHandle<MicromapEXT>;
//=== VK_NV_optical_flow ===
template <>
class SharedHandleTraits<OpticalFlowSessionNV>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<OpticalFlowSessionNV>;
};
using SharedOpticalFlowSessionNV = SharedHandle<OpticalFlowSessionNV>;
//=== VK_EXT_shader_object ===
template <>
class SharedHandleTraits<ShaderEXT>
{
public:
using DestructorType = Device;
using deleter = ObjectDestroyShared<ShaderEXT>;
};
using SharedShaderEXT = SharedHandle<ShaderEXT>;
enum class SwapchainOwns
{
no,
yes,
};
struct ImageHeader : SharedHeader<DestructorTypeOf<VULKAN_HPP_NAMESPACE::Image>, typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter>
{
ImageHeader(
SharedHandle<DestructorTypeOf<VULKAN_HPP_NAMESPACE::Image>> parent,
typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter deleter = typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter(),
SwapchainOwns swapchainOwned = SwapchainOwns::no ) VULKAN_HPP_NOEXCEPT
: SharedHeader<DestructorTypeOf<VULKAN_HPP_NAMESPACE::Image>, typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter>( std::move( parent ),
std::move( deleter ) )
, swapchainOwned( swapchainOwned )
{
}
SwapchainOwns swapchainOwned = SwapchainOwns::no;
};
template <>
class SharedHandle<VULKAN_HPP_NAMESPACE::Image> : public SharedHandleBase<VULKAN_HPP_NAMESPACE::Image, ImageHeader>
{
using BaseType = SharedHandleBase<VULKAN_HPP_NAMESPACE::Image, ImageHeader>;
using DeleterType = typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter;
friend BaseType;
public:
SharedHandle() = default;
explicit SharedHandle( VULKAN_HPP_NAMESPACE::Image handle,
SharedHandle<DestructorTypeOf<VULKAN_HPP_NAMESPACE::Image>> parent,
SwapchainOwns swapchain_owned = SwapchainOwns::no,
DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT
: BaseType( handle, std::move( parent ), std::move( deleter ), swapchain_owned )
{
}
protected:
static void internalDestroy( const ImageHeader & control, VULKAN_HPP_NAMESPACE::Image handle ) VULKAN_HPP_NOEXCEPT
{
if ( control.swapchainOwned == SwapchainOwns::no )
{
control.deleter.destroy( control.parent.get(), handle );
}
}
};
struct SwapchainHeader
{
SwapchainHeader( SharedHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR> surface,
SharedHandle<DestructorTypeOf<VULKAN_HPP_NAMESPACE::SwapchainKHR>> parent,
typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::SwapchainKHR>::deleter deleter =
typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::SwapchainKHR>::deleter() ) VULKAN_HPP_NOEXCEPT
: surface( std::move( surface ) )
, parent( std::move( parent ) )
, deleter( std::move( deleter ) )
{
}
SharedHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR> surface{};
SharedHandle<DestructorTypeOf<VULKAN_HPP_NAMESPACE::SwapchainKHR>> parent{};
typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::SwapchainKHR>::deleter deleter{};
};
template <>
class SharedHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR> : public SharedHandleBase<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainHeader>
{
using BaseType = SharedHandleBase<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainHeader>;
using DeleterType = typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::SwapchainKHR>::deleter;
friend BaseType;
public:
SharedHandle() = default;
explicit SharedHandle( VULKAN_HPP_NAMESPACE::SwapchainKHR handle,
SharedHandle<DestructorTypeOf<VULKAN_HPP_NAMESPACE::SwapchainKHR>> parent,
SharedHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR> surface,
DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT
: BaseType( handle, std::move( surface ), std::move( parent ), std::move( deleter ) )
{
}
public:
const SharedHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR> & getSurface() const VULKAN_HPP_NOEXCEPT
{
return getHeader().surface;
}
protected:
using BaseType::internalDestroy;
};
template <typename HandleType, typename DestructorType>
class SharedHandleBaseNoDestroy : public SharedHandleBase<HandleType, DestructorType>
{
public:
using SharedHandleBase<HandleType, DestructorType>::SharedHandleBase;
const DestructorType & getDestructorType() const VULKAN_HPP_NOEXCEPT
{
return SharedHandleBase<HandleType, DestructorType>::getHeader();
}
protected:
static void internalDestroy( const DestructorType &, HandleType ) VULKAN_HPP_NOEXCEPT {}
};
//=== VK_VERSION_1_0 ===
template <>
class SharedHandle<PhysicalDevice> : public SharedHandleBaseNoDestroy<PhysicalDevice, SharedInstance>
{
friend SharedHandleBase<PhysicalDevice, SharedInstance>;
public:
SharedHandle() = default;
explicit SharedHandle( PhysicalDevice handle, SharedInstance parent ) noexcept
: SharedHandleBaseNoDestroy<PhysicalDevice, SharedInstance>( handle, std::move( parent ) )
{
}
};
using SharedPhysicalDevice = SharedHandle<PhysicalDevice>;
template <>
class SharedHandle<Queue> : public SharedHandleBaseNoDestroy<Queue, SharedDevice>
{
friend SharedHandleBase<Queue, SharedDevice>;
public:
SharedHandle() = default;
explicit SharedHandle( Queue handle, SharedDevice parent ) noexcept : SharedHandleBaseNoDestroy<Queue, SharedDevice>( handle, std::move( parent ) ) {}
};
using SharedQueue = SharedHandle<Queue>;
//=== VK_KHR_display ===
template <>
class SharedHandle<DisplayKHR> : public SharedHandleBaseNoDestroy<DisplayKHR, SharedPhysicalDevice>
{
friend SharedHandleBase<DisplayKHR, SharedPhysicalDevice>;
public:
SharedHandle() = default;
explicit SharedHandle( DisplayKHR handle, SharedPhysicalDevice parent ) noexcept
: SharedHandleBaseNoDestroy<DisplayKHR, SharedPhysicalDevice>( handle, std::move( parent ) )
{
}
};
using SharedDisplayKHR = SharedHandle<DisplayKHR>;
template <>
class SharedHandle<DisplayModeKHR> : public SharedHandleBaseNoDestroy<DisplayModeKHR, SharedDisplayKHR>
{
friend SharedHandleBase<DisplayModeKHR, SharedDisplayKHR>;
public:
SharedHandle() = default;
explicit SharedHandle( DisplayModeKHR handle, SharedDisplayKHR parent ) noexcept
: SharedHandleBaseNoDestroy<DisplayModeKHR, SharedDisplayKHR>( handle, std::move( parent ) )
{
}
};
using SharedDisplayModeKHR = SharedHandle<DisplayModeKHR>;
//=== VK_INTEL_performance_query ===
template <>
class SharedHandle<PerformanceConfigurationINTEL> : public SharedHandleBaseNoDestroy<PerformanceConfigurationINTEL, SharedDevice>
{
friend SharedHandleBase<PerformanceConfigurationINTEL, SharedDevice>;
public:
SharedHandle() = default;
explicit SharedHandle( PerformanceConfigurationINTEL handle, SharedDevice parent ) noexcept
: SharedHandleBaseNoDestroy<PerformanceConfigurationINTEL, SharedDevice>( handle, std::move( parent ) )
{
}
};
using SharedPerformanceConfigurationINTEL = SharedHandle<PerformanceConfigurationINTEL>;
#endif // !VULKAN_HPP_NO_SMART_HANDLE
} // namespace VULKAN_HPP_NAMESPACE
#endif // VULKAN_SHARED_HPP
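To illustrate how the new vulkan_shared.hpp header is meant to be used, here is a minimal, hedged sketch. Assumptions: exceptions enabled, the default dispatcher already initialized, and a valid vk::Device supplied by the caller; wrapping a raw handle in a SharedHandle transfers ownership to it.

#include <vulkan/vulkan_shared.hpp>

void example( vk::Device device )
{
  vk::SharedDevice sharedDevice{ device };  // takes ownership of the device

  vk::BufferCreateInfo bufferCI{ {}, 1024, vk::BufferUsageFlagBits::eTransferSrc };
  vk::SharedBuffer     sharedBuffer{ device.createBuffer( bufferCI ), sharedDevice };

  // The buffer's control block stores its parent SharedDevice, so resetting our copy
  // does not destroy the device while the buffer is still alive.
  sharedDevice.reset();
}  // buffer destroyed first, then the last device reference is released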

View file

@@ -93684,10 +93684,10 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR(
VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral,
uint32_t generalShader_ = {},
uint32_t closestHitShader_ = {},
uint32_t anyHitShader_ = {},
uint32_t intersectionShader_ = {},
uint32_t generalShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR,
uint32_t closestHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR,
uint32_t anyHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR,
uint32_t intersectionShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR,
const void * pShaderGroupCaptureReplayHandle_ = {},
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
@@ -93814,10 +93814,10 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
uint32_t generalShader = {};
uint32_t closestHitShader = {};
uint32_t anyHitShader = {};
uint32_t intersectionShader = {};
uint32_t generalShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR;
uint32_t closestHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR;
uint32_t anyHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR;
uint32_t intersectionShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR;
const void * pShaderGroupCaptureReplayHandle = {};
};
@@ -94225,10 +94225,10 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV(
VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral,
uint32_t generalShader_ = {},
uint32_t closestHitShader_ = {},
uint32_t anyHitShader_ = {},
uint32_t intersectionShader_ = {},
uint32_t generalShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV,
uint32_t closestHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV,
uint32_t anyHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV,
uint32_t intersectionShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV,
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
, type( type_ )
@@ -94344,10 +94344,10 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
uint32_t generalShader = {};
uint32_t closestHitShader = {};
uint32_t anyHitShader = {};
uint32_t intersectionShader = {};
uint32_t generalShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV;
uint32_t closestHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV;
uint32_t anyHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV;
uint32_t intersectionShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV;
};
template <>
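The practical effect of the default changes above: a value-initialized ray-tracing shader group now marks every stage as unused (VK_SHADER_UNUSED_KHR / VK_SHADER_UNUSED_NV, i.e. ~0u) instead of pointing at shader index 0. A brief, hedged sketch assuming the headers from this release:

#include <cassert>
#include <vulkan/vulkan.hpp>

void checkDefaults()
{
  vk::RayTracingShaderGroupCreateInfoKHR group{};  // all stages now default to "unused"
  assert( group.generalShader == vk::ShaderUnusedKHR );
  assert( group.intersectionShader == vk::ShaderUnusedKHR );

  // Only the stages a group actually uses need to be set explicitly.
  group.setType( vk::RayTracingShaderGroupTypeKHR::eGeneral ).setGeneralShader( 0 );
}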

File diff suppressed because one or more lines are too long

View file

@@ -175,7 +175,7 @@ branch of the member gitlab server.
#define <name>VKSC_API_VERSION_1_0</name> <type>VK_MAKE_API_VERSION</type>(VKSC_API_VARIANT, 1, 0, 0)// Patch version should always be set to 0</type>
<type api="vulkan" category="define">// Version of this file
#define <name>VK_HEADER_VERSION</name> 264</type>
#define <name>VK_HEADER_VERSION</name> 265</type>
<type api="vulkan" category="define" requires="VK_HEADER_VERSION">// Complete version of this file
#define <name>VK_HEADER_VERSION_COMPLETE</name> <type>VK_MAKE_API_VERSION</type>(0, 1, 3, VK_HEADER_VERSION)</type>
<type api="vulkansc" category="define">// Version of this file
@@ -22815,8 +22815,9 @@ typedef void* <name>MTLSharedEvent_id</name>;
</extension>
<extension name="VK_EXT_extension_484" number="484" author="KHR" contact="Chris Glover @cdglove" supported="disabled">
<require>
<enum value="0" name="VK_EXT_EXTENSION_484_SPEC_VERSION"/>
<enum value="&quot;VK_EXT_extension_484&quot;" name="VK_EXT_EXTENSION_484_EXTENSION_NAME"/>
<enum value="0" name="VK_EXT_EXTENSION_484_SPEC_VERSION"/>
<enum value="&quot;VK_EXT_extension_484&quot;" name="VK_EXT_EXTENSION_484_EXTENSION_NAME"/>
<enum bitpos="31" extends="VkPipelineCreateFlagBits2KHR" name="VK_PIPELINE_CREATE_2_RESERVED_31_BIT_KHR"/>
</require>
</extension>
<extension name="VK_QCOM_tile_properties" number="485" type="device" depends="VK_KHR_get_physical_device_properties2" author="QCOM" contact="Jeff Leger @jackohound" supported="vulkan">
@@ -23337,6 +23338,12 @@ typedef void* <name>MTLSharedEvent_id</name>;
<enum value="&quot;VK_QCOM_extension_548&quot;" name="VK_QCOM_EXTENSION_548_EXTENSION_NAME"/>
</require>
</extension>
<extension name="VK_NV_extension_549" number="549" author="NV" contact="Piers Daniell @pdaniell-nv" supported="disabled">
<require>
<enum value="0" name="VK_NV_EXTENSION_549_SPEC_VERSION"/>
<enum value="&quot;VK_NV_extension_549&quot;" name="VK_NV_EXTENSION_549_EXTENSION_NAME"/>
</require>
</extension>
</extensions>
<formats>
<format name="VK_FORMAT_R4G4_UNORM_PACK8" class="8-bit" blockSize="1" texelsPerBlock="1" packed="8">
@@ -25501,7 +25508,7 @@ typedef void* <name>MTLSharedEvent_id</name>;
</syncstage>
<syncstage name="VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT" alias="VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT">
<syncsupport queues="graphics"/>
<syncequivalent stage="VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT,VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT,VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT,VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT,VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT,VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT,VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT,VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT,VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT,VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT,VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT,VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT,VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT,VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT,VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV,VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT,VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI,VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI,VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI"/>
<syncequivalent stage="VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT,VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT,VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT,VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT,VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT,VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT,VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT,VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT,VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT,VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT,VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT,VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT,VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT,VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT,VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT,VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI,VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI,VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI"/>
</syncstage>
<syncstage name="VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT" alias="VK_PIPELINE_STAGE_ALL_COMMANDS_BIT">
</syncstage>